Example No. 1
def test_generate_text_1():
    """
    text -> |parser| -> AST -> |text generator| -> result
    assert result == text
    """
    pageName = 'PageName'
    text = """+ Heading 1
    A sentence, a link: [test], and some more text, even some *bold*. Something
    _simple_ to start.
    """
    wiki_content = {pageName: text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    ast = page.getLivePageAst()
    result = langHelper.generate_text(ast, page)
    assert result == text

    tf = langHelper.TextFormatter()
    result = tf.format(ast, page)
    assert result == text
    assert tf.count('heading') == 1
    assert tf.count('wikiWord') == 1
    assert tf.count('bold') == 1
    assert tf.count('italics') == 1
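
Most of the examples below repeat the same parse-then-regenerate round trip. A minimal helper, assembled only from the calls already shown above, could factor that out; this is a sketch for illustration, not a helper from the original test suite:

def assert_round_trip(text, page_name='PageName'):
    # Illustrative helper (not part of the original tests): parse the text
    # into an AST and check that the text generator reproduces it exactly.
    wikidoc = MockWikiDocument({page_name: text}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(page_name)
    ast = page.getLivePageAst()
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    result = langHelper.generate_text(ast, page)
    assert result == text, '%r != %r' % (result, text)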
Example No. 2
def test_generate_text_3():
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    tests = [  # page name, text fragment, target
        ('PageName', '[//WikiWord]', 'WikiWord'),
        ('PageName', '[//wikiword]', 'wikiword'),
        ('PageName', '[/wikiword]', 'PageName/wikiword'),
        ('PageName', '[/wikiword/subsub]', 'PageName/wikiword/subsub'),
        ('PageName', '[.]', 'PageName'),
        ('PageName', 'PageName', 'PageName'),
        ('pageName', '[pageName]', 'pageName'),
        ('Main/SubPage', '[.]', 'Main/SubPage'),
        ('Main/SubPage', '[Test]', 'Main/Test'),
        ('Main/SubPage', '[..]', 'Main'),
        ('Main/SubPage', '[../Chair]', 'Chair'),
    ]
    for nr, (pageName, text_fragment, target) in enumerate(tests, 1):
        wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
        page = wikidoc.getWikiPage(pageName)
        text = '\n%s\n\n' % text_fragment
        page.setContent(text)
        ast = page.getLivePageAst()

        nf = NodeFinder(ast)
        assert nf.count('wikiWord') == 1
        link_core = nf.wikiWord().linkPath.getLinkCore()
        resolved = langHelper.resolveWikiWordLink(link_core, page)
        assert resolved == target, (
            '%d: %r on %r -> %r != %r' %
            (nr, link_core, pageName, resolved, target))
        result = langHelper.generate_text(ast, page)
        assert result == text
Example No. 3
def test_parse_wikiwords():
    page_name = u'PageName'
    text_fragments = [  # (text, is_wikiword)
        (u'WikiWord', None),
        (u'[[wikiword]]', u'wikiword'),
        (u'WikiWord!anchor', None),
        (u'[[WikiWord|title]]', u'WikiWord'),
        (u'[[WikiWord|title]]!anchor', u'WikiWord'),
        (u'[[WikiWord#search_fragment]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', u'WikiWord'),
        (u'[[.]]', page_name),
        (u'CamelCase is not seen as a WikiWord.', None),
    ]
    wiki_content = {page_name: u''}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    for (text_fragment, wikiword) in text_fragments:
        text = u'\n%s\n\n' % text_fragment
        page = wikidoc.getWikiPage(page_name)
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if wikiword is not None:
            assert nf.count('wikiWord') == 1
            assert nf.wikiWord().wikiWord == wikiword
        else:
            assert not nf.count('wikiWord')
Example No. 4
def test_generate_text_1():
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    pageName = u'PageName'
    text = u"""
    [[test]]
    """
    wiki_content = {pageName: text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    ast = page.getLivePageAst()
    result = langHelper.generate_text(ast, page)
    assert result == text
Example No. 5
def test_generate_text_quotes():
    test_values = '''
    ## input text -> expected output

    [person.email: "*****@*****.**"]      |  [person.email: "*****@*****.**"]
    [:page: "IncrementalSearch"]                |  [:page: IncrementalSearch]
    [:page: //IncrementalSearch//]              |  [:page: IncrementalSearch]
    [:page: /////IncrementalSearch/////]        |  [:page: IncrementalSearch]
    [:page: \\IncrementalSearch\\]              |  [:page: IncrementalSearch]
    [:eval:"5+6"]                               |  [:eval: "5+6"]
    [contact: "Carl [Home]"]                    |  [contact: "Carl [Home]"]
    [alias: Foo; Bar; FooBar]                   |  [alias: Foo; Bar; FooBar]
    [key: ""value 1 with " in it""; "value2"; "final #%! value ?"]  |  [key: ""value 1 with " in it""; value2; final #%! value ?]
    [key: """value"""""]                        | [key: """value"""""]
    [key: """value "special" ""a"" "b" """; c]  | [key: """value "special" ""a"" "b" """; c]
    [key: a;; b; c; ""d"""]                     | [key: a; b; c; ""d"""]
    [:rel: "children"; existingonly; columns 2; ""coldir down"""]  |  [:rel: children; existingonly; columns 2; ""coldir down"""]
    [key: "Elisabeth Willemslaan F. 156450, 113150 Wespelaar (Haacht) B"]  |  [key: "Elisabeth Willemslaan F. 156450, 113150 Wespelaar (Haacht) B"]

    '''

    def parse_test_values(s):
        for line in s.splitlines():
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            input_text, output_text = [s.strip() for s in line.split('|')]
            yield input_text, output_text

    def err_msg():
        msg = '%r -> %r != %r'
        return msg % (text_fragment_in, result_fragment, text_fragment_out)

    pageName = 'PageName'
    wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    tests = parse_test_values(test_values)
    for text_fragment_in, text_fragment_out in tests:
        text_in = '\n%s\n\n' % text_fragment_in
        text_out = '\n%s\n\n' % text_fragment_out
        page.setContent(text_in)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        assert (nf.count('attribute') == 1 or
                nf.count('insertion') == 1), text_fragment_in
        result = langHelper.generate_text(ast, page)
        result_fragment = result.strip()
        assert result == text_out, err_msg()
Example No. 6
def test_parse_1():
    """
    getLivePageAst
    """
    text = """+ Heading 1

This is a sentence.

"""
    wiki_content = {'TestPage': text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    page = wikidoc.getWikiPage('TestPage')
    ast = page.getLivePageAst()
    nf = NodeFinder(ast)
    assert nf.count('heading') == 1
    assert nf.count('wikiWord') == 0
    assert nf.heading.headingContent().getString() == 'Heading 1'
    ast_ = parse(text, 'TestPage', LANGUAGE_NAME)
    assert ast_eq(ast, ast_)
Example No. 7
def test_generate_text():
    pageName = u'PageName'
    wikidoc = MockWikiDocument({pageName: u''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    text_fragments = [
        (u'[[wikiword]]', 'wikiWord'),  # 1
        (u'[[wikiword]]!anchor', 'wikiWord'),
        (u'[[wikiword|title]]', 'wikiWord'),
        (u'[[WikiWord|title]]', 'wikiWord'),
        (u'[[wikiword|title]]!anchor', 'wikiWord'),
        (u'[[WikiWord|title]]!anchor', 'wikiWord'),
        (u'[[wikiword#search_fragment]]', 'wikiWord'),
        (u'[[wikiword#search fragment]]', 'wikiWord'),
        (u'[[WikiWord#search# fragment]]', 'wikiWord'),
        (u'[[wikiword#search_fragment]]!anchor', 'wikiWord'),  # 10
        (u'[[WikiWord#search_fragment]]!anchor', 'wikiWord'),
        (u'[[wikiword#search_fragment|title]]', 'wikiWord'),
        (u'[[WikiWord#search_fragment|title]]', 'wikiWord'),
        (u'[[wikiword#search_fragment|title]]!anchor', 'wikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', 'wikiWord'),
        (u'WikiWord', None),
        (u'WikiWord!anchor', None),  # 17
        (u'WikiWord#search_fragment', None),
        (u'[[key: value]]', 'attribute'),
        (u'[[test: ok; nok]]', 'attribute'),
        (u'[[:page: wikiword]]', 'insertion'),
        (u'this is a sentence', None),
    ]
    for nr, (text_fragment, node_name) in enumerate(text_fragments, 1):
        text = u'\n%s\n\n' % text_fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if node_name:
            assert nf.count(node_name) == 1, nr
        else:
            assert not nf.count('wikiWord'), nr
            assert not nf.count('attribute'), nr
            assert not nf.count('insertion'), nr
        result = langHelper.generate_text(ast, page)
        assert result == text
Example No. 8
def test_parse_wikiwords():
    """
    assert text fragments are recognized as wiki words by parser
    """
    text_fragments = [  # (text, wikiword)
        ('WikiWord', 'WikiWord'),
        ('[wikiword]', 'wikiword'),
        ('WikiWord!anchor', 'WikiWord'),
        ('[WikiWord|title]', 'WikiWord'),
        ('[WikiWord|title]!anchor', 'WikiWord'),
        ('[wikiword]#searchfragment', 'wikiword'),
        ('[wikiword#searchfragment]', 'wikiword'),
        ('WikiWord#searchfragment', 'WikiWord'),
        ('WikiWord#search# fragment', 'WikiWord'),
        ('[WikiWord#search fragment]', 'WikiWord'),
        ('[wikiword#search fragment]', 'wikiword'),
        ('[WikiWord#searchfragment|title]', 'WikiWord'),
        ('[WikiWord#searchfragment|title]!anchor', 'WikiWord'),
        ('[.]', 'PageName'),
        ('This is a sentence', None),
        ('+ Heading\n\n    * item\n    * item 2\n\n', None),
        ('wikiword', None),
        ('wikiword!thisisnotananchor', None),
        ('wikiword#hash', None),
        ('wikiword|thisisnotitle', None),
    ]
    wiki_content = {'PageName': ''}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    for (text_fragment, wikiword) in text_fragments:
        text = '\n%s\n\n' % text_fragment
        page = wikidoc.getWikiPage('PageName')
        page.setContent(text)
        ast = page.getLivePageAst()
        assert ast.getString() == text
        nf = NodeFinder(ast)
        if wikiword is not None:
            assert nf.count('wikiWord') == 1
            assert nf.wikiWord().wikiWord == wikiword
        else:
            assert nf.count('wikiWord') == 0
Example No. 9
def test_generate_text_2():
    pageName = 'PageName'
    wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    text_fragments = [
        ('[wikiword]', 'wikiWord'),
        ('[wikiword]!anchor', 'wikiWord'),
        ('[wikiword|title]', 'wikiWord'),
        ('[WikiWord|title]', 'wikiWord'),
        ('[wikiword|title]!anchor', 'wikiWord'),
        ('[WikiWord|title]!anchor', 'wikiWord'),
        ('[wikiword#searchfragment]', 'wikiWord'),
        ('[wikiword#search fragment]', 'wikiWord'),
        ('[WikiWord#search# fragment]', 'wikiWord'),
        ('[wikiword#searchfragment]!anchor', 'wikiWord'),
        ('[WikiWord#searchfragment]!anchor', 'wikiWord'),
        ('[wikiword#searchfragment|title]', 'wikiWord'),
        ('[WikiWord#searchfragment|title]', 'wikiWord'),
        ('[wikiword#searchfragment|title]!anchor', 'wikiWord'),
        ('[WikiWord#searchfragment|title]!anchor', 'wikiWord'),
        ('WikiWord', 'wikiWord'),
        ('WikiWord!anchor', 'wikiWord'),
        ('WikiWord#searchfragment', 'wikiWord'),
        ('[key: value]', 'attribute'),
        ('[test: ok; nok]', 'attribute'),
        ('[:page: wikiword]', 'insertion'),
        ('this is a sentence', None),
    ]
    for text_fragment, node_name in text_fragments:
        text = '\n%s\n\n' % text_fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if node_name:
            assert nf.count(node_name) == 1
        result = langHelper.generate_text(ast, page)
        assert result == text
Example No. 10
def parse_test_wikis_text_file(s, wiki_language_name):
    """Return dict. of test wiki content for testing:
    {wikiname: (wikiDoc, renameSeq)}."""
    ans = {}  # {wikiname: (wikiDoc, renameSeq)}

    def error_msg():
        return 'Parse error on line %d section %d: %r' % (line_nr, section,
                                                          line)

    wiki_start = re.compile(r'^= (?P<wikiname>\w+) =+$')
    wiki_end = re.compile(r'^={80}$')
    page_start = re.compile(r'^- (?P<pagename>[/\w]+) -+$')
    page_end = re.compile(r'^-{80}$')
    rename_seq_line = lambda line: '->' in line

    section = 0
    renameSeq = []
    for line_nr, line in enumerate(s.splitlines(True), 1):
        if section == 0:  # outside wiki
            # new wiki?
            m = wiki_start.match(line)
            if m:
                assert len(line.strip()) == 80, error_msg()
                wiki_name = m.group('wikiname')
                wiki_content = {}
                renameSeq = []
                section = 1
                continue

            assert not line.strip(), error_msg()

        elif section == 1:  # inside wiki, outside page
            # new page?
            m = page_start.match(line)
            if m:
                assert len(line.strip()) == 80, error_msg()
                page_name = m.group('pagename')
                page_lines = []
                section = 2
                continue

            # renameSeq?
            if rename_seq_line(line):
                for rename_couple in line.strip().split(','):
                    old_page_name, new_page_name = [
                        s.strip() for s in rename_couple.split('->')
                    ]
                    renameSeq.append((old_page_name, new_page_name))
                continue

            # end of wiki?
            m = wiki_end.match(line)
            if m:
                # we now have a complete wiki
                wikidoc = MockWikiDocument(wiki_content, wiki_language_name)
                assert wiki_name not in ans, error_msg()
                ans[wiki_name] = (wikidoc, renameSeq)
                section = 0
                continue

            assert not line.strip(), error_msg()

        elif section == 2:  # inside wiki, inside page
            # end of page?
            m = page_end.match(line)
            if m:
                # we now have a complete page
                page_content = ''.join(page_lines)
                assert page_name not in wiki_content, error_msg()
                wiki_content[page_name] = page_content
                section = 1
                continue

            page_lines.append(line)

    return ans
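
For reference, a minimal fixture in the format parse_test_wikis_text_file expects might look as follows. The wiki and page names are made up; the 80-character '=' and '-' header/footer lines and the 'old -> new' rename line follow the regexes and assertions above:

lines = [
    '= TestWiki ' + '=' * 69,   # wiki header, padded to exactly 80 characters
    '- PageA ' + '-' * 72,      # page header, padded to exactly 80 characters
    'Some page content.',
    '-' * 80,                   # end of page
    'PageA -> PageB',           # rename-sequence entry: old name -> new name
    '=' * 80,                   # end of wiki
]
wikis = parse_test_wikis_text_file('\n'.join(lines) + '\n', LANGUAGE_NAME)
wikidoc, rename_seq = wikis['TestWiki']
assert rename_seq == [('PageA', 'PageB')]
page = wikidoc.getWikiPage('PageA')  # the parsed wiki contains page 'PageA'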
Example No. 11
def test_generate_WikidPadHelp_selection():
    test_fragments = [  # (page_name, text, node_name, formatted_text)
        ('pageName', '[.]', 'wikiWord', '[.]'),  # 1
        ('PageName', '[.]', 'wikiWord', '[.]'),
        ('PageName', 'PageName', 'wikiWord', 'PageName'),
        ('PageName', '[PageName]', 'wikiWord', '[PageName]'),
        ('PageName', '[contact: "Carl [Home]"]', 'attribute',
         '[contact: "Carl [Home]"]'),
        ('PageName', '[//OptionsDialog]', 'wikiWord', '[//OptionsDialog]'),
        ('PageName', '[//ebay/Circlet]', 'wikiWord', '[//ebay/Circlet]'),
        ('PageName', '[WikiWord|   This is the title  ]', 'wikiWord',
         '[WikiWord|This is the title]'),
        ('PageName', '[:rel: parents]', 'insertion', '[:rel: parents]'),
        ('PageName', '[:rel: parents; aslist]', 'insertion',
         '[:rel: parents; aslist]'),
        ('PageName', '[:rel: children; existingonly;columns 2]', 'insertion',
         '[:rel: children; existingonly; columns 2]'),
        ('PageName', '[key: value]', 'attribute', '[key: value]'),
        ('PageName', '[:toc: ]', 'insertion', '[:toc:]'),
        (
            'ChangeLog2008',
            '[test:foo; ]',  # still legal?!
            'attribute',
            '[test: foo]'),
        (
            'TestPage',
            '[test:foo;; ]',  # still legal?!
            'attribute',
            '[test: foo]'),
        ('PageName', '[key: value with spaces]', 'attribute',
         '[key: value with spaces]'),
        ('PageName', '[key: value; value2]', 'attribute',
         '[key: value; value2]'),
        ('PageName', '[key: "value: with special char"]', 'attribute',
         '[key: value: with special char]'),
        ('PageName', '[key: "value = special"]', 'attribute',
         '[key: value = special]'),
        ('pageName', '[wikiword]#searchfragment', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#searchfragment]', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#search fragment]', 'wikiWord',
         '[wikiword#search fragment]'),
        ('AutoCompletion', '[bookmarked=true]', 'attribute',
         '[bookmarked: true]'),
        ('ChangeLog', '[ChangeLog2011]', 'wikiWord', '[ChangeLog2011]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chron. view]', 'wikiWord',
         '[OptionsDialog#+++ Chron. view]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chronological]', 'wikiWord',
         '[OptionsDialog#+++ Chronological]'),
        ('CommandLineSupport',
         '[WikiMaintenance#++ Update ext. modif. wiki files]', 'wikiWord',
         '[WikiMaintenance#++ Update ext. modif. wiki files]'),
        ('ExternalGraphicalApplications', '[:eqn:"a^2 + b^2 = c^2"]',
         'insertion', '[:eqn: "a^2 + b^2 = c^2"]'),
        ('Icon airbrush', '[icon:airbrush]', 'attribute', '[icon: airbrush]'),
        ('Icon cd_audio', '[icon:cd_audio ]', 'attribute', '[icon: cd_audio]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:rel: children;existingonly;columns 2;coldir down]',
         'insertion',
         '[:rel: children; existingonly; columns 2; coldir down]'),
        ('Insertions', '[:search:"todo:todo"]', 'insertion',
         '[:search: todo:todo]'),
        ('Insertions', '[:search:"todo:todo";showtext]', 'insertion',
         '[:search: todo:todo; showtext]'),
        ('Insertions', '[:eval:"5+6"]', 'insertion', '[:eval: "5+6"]'),
        ('ExternalGraphicalApplications',
         '[:dot:"\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]',
         'insertion',
         '[:dot: "\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]'
         ),
        ('ExternalGraphicalApplications',
         ('[:ploticus:"\n'
          '#proc areadef\n'
          '  title: Annual Revenues, in thousands\n'
          '  rectangle: 1 1 5 2\n'
          '  xrange: 0 4\n'
          '  yrange: -5000 15000\n'
          '  yaxis.stubs: incremental 5000\n'
          '  yaxis.grid: color=pink\n'
          '  xaxis.stubs: text\n'
          'ABC Corp\n'
          'NetStuff\n'
          'MicroMason\n'
          '\n'
          '#proc getdata\n'
          '  data: 6430 -780 13470\n'
          '\n'
          '#proc processdata\n'
          '  action: rotate\n'
          '\n'
          '#proc bars\n'
          '  lenfield: 1\n'
          '  color: dullyellow\n'
          '  labelword: $ @@N\n'
          '  crossover: 0\n'
          '"]'), 'insertion',
         ('[:ploticus: "\n#proc areadef\n  title: Annual Revenues, in '
          'thousands\n  rectangle: 1 1 5 2\n  xrange: 0 4\n  yrange: -5000 '
          '15000\n  yaxis.stubs: incremental 5000\n  yaxis.grid: color=pink\n'
          '  xaxis.stubs: text\nABC Corp\nNetStuff\nMicroMason\n\n'
          '#proc getdata\n  data: 6430 -780 13470\n\n#proc processdata\n'
          '  action: rotate\n\n#proc bars\n  lenfield: 1\n  color: dullyellow\n'
          '  labelword: $ @@N\n  crossover: 0\n"]')),
        ('ExternalGraphicalApplications', """[:gnuplot:"
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]""", 'insertion', """[:gnuplot: "
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]"""),
    ]
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    tests = enumerate(test_fragments, 1)
    for nr, (pageName, text, node_name, formatted_text) in tests:
        text_ = '\n%s\n\n' % text
        try:
            page = wikidoc.getWikiPage(pageName)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(pageName)
        page.setContent(text_)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if node_name is not None:
            assert nf.count(node_name) == 1
        else:
            assert nf.count('wikiWord') == 0
            assert nf.count('attribute') == 0
            assert nf.count('insertion') == 0
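        # [1:-2] strips the leading '\n' and the trailing '\n\n' that were
        # wrapped around the fragment in text_ above, leaving the bare
        # regenerated fragment for comparison.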
        result = langHelper.generate_text(ast, page)[1:-2]
        assert result == formatted_text, '%d: %r on %r -> %r != %r' % (
            nr, text, pageName, result, formatted_text)
Example No. 12
def test_generate_WikidPadHelp():
    """Run over *complete* WikidPadHelp wiki: parse each page, generate text
    from AST using text generator, and check if generated text matches the
    original text::

        text -> |parser| -> AST -> |text generator| -> result
        assert result == text

    On a *first* run, set `add_unknown_differences_to_annotation_file` to
    True so that unknown differences are appended to the annotation file,
    then annotate them by hand: keep '!=' where the lines really differ and
    change it to '==' where they are semantically equal even though the
    syntax differs, e.g. [key:  value  ] versus [key: value] (note the
    extra spaces).

    """
    # add_unknown_differences_to_annotation_file = True
    add_unknown_differences_to_annotation_file = False

    def load_annotations(path):
        equivalents = defaultdict(dict)
        known_differences = defaultdict(dict)
        page_name, text, result = None, None, None
        try:
            with io.open(path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if not line or line.startswith('#'):
                        continue
                    if line.startswith('-- '):
                        text = line[3:]
                    elif line.startswith('== '):
                        result = line[3:]
                        equivalents[page_name][text] = result
                    elif line.startswith('!= '):
                        result = line[3:]
                        known_differences[page_name][text] = result
                    else:
                        page_name = line
        except IOError:
            pass  # no file yet?!
        return equivalents, known_differences

    equivalents, known_differences = load_annotations(WIKIDPADHELP_ANNOTATIONS)

    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    paths = glob.glob(os.path.join(WIKIDPADHELP_DATA_DIR, '*.wiki'))
    skip = set()

    # The page itself is in WikidPad syntax, so it has to work as well

    #     skip = set(
    #         u'MediaWiki%2FTextFormatting',  # Media Wiki syntax, not WikidPadParser
    #     )
    nof_known_differences = 0
    nof_unknown_differences = 0
    for nr, path in enumerate(sorted(paths), 1):
        pageName, _ = os.path.splitext(os.path.basename(path))
        if pageName in skip:
            continue
        text = get_text(path)
        try:
            page = wikidoc.getWikiPage(pageName)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(pageName)
        page.setContent(text)

        ast = page.getLivePageAst()
        result = langHelper.generate_text(ast, page)
        # assert result == text
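        # Instead of asserting strict equality, the loop below compares the
        # result line by line against the annotation file, so equivalent
        # lines and known differences are tolerated.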

        current_page_correct = True
        with io.open(WIKIDPADHELP_ANNOTATIONS, 'a', encoding='utf-8') as f:
            to_compare = list(
                zip(result.splitlines(True), text.splitlines(True)))
            for result_line, text_line in to_compare:
                result_line = result_line.rstrip()
                text_line = text_line.rstrip()
                if result_line == text_line:
                    continue  # ok, equal
                try:
                    equivalent_line = equivalents[pageName][text_line]
                except KeyError:
                    equivalent_line = None
                if result_line == equivalent_line:
                    continue  # ok, lines are considered equal
                try:
                    known_difference = known_differences[pageName][text_line]
                except KeyError:
                    known_difference = None
                if result_line == known_difference:
                    nof_known_differences += 1
                    continue  # ok, we know about this difference

                # we have an unknown difference here
                nof_unknown_differences += 1
                if add_unknown_differences_to_annotation_file:
                    if current_page_correct:  # first error for this page
                        current_page_correct = False
                        f.write(pageName + '\n')
                    f.write('-- ' + text_line + '\n')
                    f.write('!= ' + result_line + '\n')

    msg = 'TOTAL: %d known differences, %d unknown differences'
    msg %= (nof_known_differences, nof_unknown_differences)
    assert not nof_unknown_differences, msg
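
The annotation file read by load_annotations is plain text: a bare line names a page, a '-- ' line quotes the original text line from that page, and the following '== ' or '!= ' line quotes the generated line, marking it as semantically equivalent or as a known, tolerated difference. A hypothetical fragment (the entries are made up for illustration):

SAMPLE_ANNOTATIONS = '\n'.join([
    '# blank lines and lines starting with # are ignored',
    'Insertions',                        # page name
    '-- [:page: "IncrementalSearch"]',   # original line on that page
    '== [:page: IncrementalSearch]',     # generated line, considered equivalent
    'SomeOtherPage',                     # another (hypothetical) page name
    '-- original line from the .wiki file',
    '!= line the generator currently produces (known difference)',
])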
Example No. 13
def test_parse_2():
    text = """+ WikidPad

http://wikidpad.sourceforge.net/

WikidPad is an open source, [Python]-based wiki-like outliner for storing
thoughts, ideas, to-do lists, contacts, and other notes with wiki-like
linking between pages.

WikidPad!Help


++ Features

    * On the fly WikiWord linking, as you type
    * WikiWord auto-completion
    * Easy WikiWord navigation
    * Wiki text styling
    * ...


anchor:Help
++ Help

A complete online documentation for beginners and advanced users is here: GettingStarted.


WikiDocumentAttributes#*short_hint*

"""
    ast = parse(text, 'WikidPad', LANGUAGE_NAME)
    nf = NodeFinder(ast)
    assert nf.count('heading') == 3
    assert nf.count('urlLink') == 1
    assert nf.count('wikiWord') == 9  # title is also a wikiword...!
    assert nf.count('anchorDef') == 1
    assert nf.count('unorderedList') == 1
    assert nf.heading.headingContent().getString() == 'WikidPad'
    assert nf.urlLink().url == 'http://wikidpad.sourceforge.net/'
    ww_3_node = nf.wikiWord_3()
    assert ww_3_node.wikiWord == 'Python'
    assert ww_3_node.linkPath.getLinkCore() == 'Python'
    ww_9_node = nf.wikiWord_9()
    assert ww_9_node.wikiWord == 'WikiDocumentAttributes'
    assert ww_9_node.linkPath.getLinkCore() == 'WikiDocumentAttributes'
    assert ww_9_node.anchorLink is None
    assert ww_9_node.fragmentNode is not None
    assert ww_9_node.searchFragment == '*short_hint*'
    assert ww_9_node.titleNode is None
    begin = nf.unorderedList.bullet_3().pos
    end = nf.unorderedList.bullet_4().pos
    assert text[begin:end] == '* Easy WikiWord navigation\n    '
    assert nf.wikiWord_4().anchorLink == 'Help'
    assert nf[0].name == 'heading'
    assert nf[231].name == 'heading'
    assert nf[401].name == 'heading'

    wiki_content = {'WikidPad': text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    page = wikidoc.getWikiPage('WikidPad')
    ast_ = page.getLivePageAst()
    assert ast_eq(ast, ast_)
Example No. 14
def test_WikiLink_resolve_and_create():
    """
    Resolve
    -------

    1. langHelper.resolveWikiWordLink(linkCore, basePage)
        == WikidPadParser.resolveWikiWordLink

    2. WikiLinkPath(linkCore).resolveWikiWord(basePath)


    Create
    ------

    1. langHelper.createRelativeLinkFromWikiWord(pageName, basePageName,
                                                 downwardOnly=False)

    2. langHelper.createWikiLinkPathFromPageName(targetPageName, basePageName,
                                                 absolute)

    """
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    WikiLinkPath = langHelper.WikiLinkPath
    test_values = """
    ## linkCore            |  basePageName              |  targetPageName
    ##                    <- = create                   -> = resolve

    ## ABSOLUTE LINKS

    //ebay/Circlet         |  PageName                  |  ebay/Circlet
    //ebay/Circlet         |  ebay                      |  ebay/Circlet
    //ebay/Circlet         |  ebay/Circlet              |  ebay/Circlet
    //Foo/SubPage          |  PageName                  |  Foo/SubPage
    //Foo/SubPage          |  Foo                       |  Foo/SubPage
    //Foo/Foo              |  Foo                       |  Foo/Foo
    //Foo/Foo/Foo          |  Foo                       |  Foo/Foo/Foo

    ## RELATIVE LINKS

    /Couch                 |  ebay                      |  ebay/Couch
    /Couch                 |  ebay/Furniture            |  ebay/Furniture/Couch
    /Circlet               |  ebay                      |  ebay/Circlet
    /d/e                   |  a/b/c                     |  a/b/c/d/e
    /d                     |  a/b/c                     |  a/b/c/d
    /SubPage               |  Foo                       |  Foo/SubPage
    /Foo                   |  Foo                       |  Foo/Foo

    SubPage                |  Super/SubPage2            |  Super/SubPage
    SubPage               <-  Main/SubPage2             |  Main/SubPage
    WikiWord               |  PageName                  |  WikiWord
    Chaise                 |  ebay/Couch                |  ebay/Chaise
    Circlet                |  ebay/Cerebrum             |  ebay/Circlet
    OldCar                 |  ebay/Cerebrum             |  ebay/OldCar
    x                      |  a/b/c                     |  a/b/x
    ebay/Circlet           |  PageName                  |  ebay/Circlet
    Foo/SubPage            |  PageName                  |  Foo/SubPage

    .                      |  a/b/c                     ->  a/b/c
    c                      |  a/b/c                     ->  a/b/c
    c                     <-  a/b/c                     |  a/b/c
    .                      |  _                         ->  _
    .                      |  a/b/c                     -> a/b/c
    ../../a/b/c            |  a/b/c                     -> a/b/c
    Foo                    |  Foo                       -> Foo
    .                      |  Foo                       ->  Foo
    Foo                   <-  Foo                       |  Foo

    ..                     |  a/b/c                     -> a/b
    ..                     |  a/b/c                     |  a/b
    ..                     |  TestWiki/SubPage          |  TestWiki
    ..                     |  ebay/Couch/BuyerAddress   |  ebay/Couch
    ..                     |  TestWiki/SubPage          |  TestWiki
    ..                     |  a/b/c                     |  a/b
    ../TestWiki            |  TestWiki/SubPage          -> TestWiki
    ../x                   |  a/b/c                     |  a/x
    ..                     |  TestWiki/SubPage          |  TestWiki
    ../..                  |  ebay/Couch/BuyerAddress   |  ebay
    ../..                  |  a/b/c                     -> a
    ../..                 <-  a/b/c                     |  a
    ../..                  |  a/b/c/d                   |  a/b

    ../../Amazon           |  ebay/Couch/BuyerAddress   |  Amazon
    ../../y                |  a/b/c                     |  y
    ../../d/e/f            |  a/b/c                     -> d/e/f
    ../Super/SubPage       |  Main/SubPage2             |  Super/SubPage
    ../c/d                 |  a/b                       |  c/d
    ../c                   |  a/b                       |  c
    ../../d                |  a/b/c                     |  d
    ../../d/e/f            |  a/b/c                     |  d/e/f
    ../../d/e              |  a/b/c                     |  d/e
    ../main1renamed        |  main1renamed/sub          -> main1renamed

    //                     |  a                         -> VALUE_ERROR
    ../                    |  a/b/c                     -> VALUE_ERROR
    ../../                 |  a/b/c/d                   -> VALUE_ERROR
    EMPTY_STRING           |  PageName                  -> VALUE_ERROR
    ..                     |  TopLevel                  -> VALUE_ERROR
    ../..                  |  TopLevel                  -> VALUE_ERROR

    VALUE_ERROR            <-  TestPage                 |  EMPTY_STRING
    """

    values = parse_columns(test_values)

    def tests(direction):
        return values[direction]

    # ->

    def resolve_v1(linkCore, basePageName):
        try:
            basePage = wikidoc.getWikiPage(basePageName)
        except WikiWordNotFoundException:
            basePage = wikidoc.createWikiPage(basePageName)
        return langHelper.resolveWikiWordLink(linkCore, basePage)

    def resolve_v2(linkCore, basePageName):
        linkPath = WikiLinkPath(linkCore)
        basePath = langHelper.WikiLinkPath(pageName=basePageName)
        ans = linkPath.resolveWikiWord(basePath)
        return ans

    def left_to_right_err_msg_1(ver):
        msg = 'ver %d: resolve link %r on %r = %r != %r'
        return msg % (ver, linkCore, basePageName, res, targetPageName)

    def left_to_right_err_msg_2(ver):
        msg = 'ver %d: link %r on %r !-> %r'
        return msg % (ver, linkCore, basePageName, targetPageName)

    for linkCore, basePageName, targetPageName in tests('left_to_right'):
        if not isinstance(targetPageName, Exception):
            res = resolve_v1(linkCore, basePageName)
            assert res == targetPageName, left_to_right_err_msg_1(1)
            res = resolve_v2(linkCore, basePageName)
            assert res == targetPageName, left_to_right_err_msg_1(2)
        else:
            exc = type(targetPageName)
            with pytest.raises(exc, message=left_to_right_err_msg_2(1)):
                res = resolve_v1(linkCore, basePageName)
            with pytest.raises(exc, message=left_to_right_err_msg_2(2)):
                resolve_v2(linkCore, basePageName)

    # <-

    def create_v1(targetPageName, basePageName, absolute):
        if absolute:
            if not targetPageName:
                raise ValueError
            return '//' + targetPageName
        else:
            return langHelper.createRelativeLinkFromWikiWord(
                targetPageName, basePageName, downwardOnly=False)

    def create_v2(targetPageName, basePageName, absolute):
        linkPath = langHelper.createWikiLinkPathFromPageName(
            targetPageName, basePageName, absolute)
        return linkPath.getLinkCore()

    def right_to_left_err_msg_1(ver):
        msg = 'ver %d: create link to %r on %r = %r != %r'
        return msg % (ver, targetPageName, basePageName, res, linkCore)

    def right_to_left_err_msg_2(ver):
        msg = 'ver %d: link to %r on %r !-> %r'
        return msg % (ver, targetPageName, basePageName, linkCore)

    for linkCore, basePageName, targetPageName in tests('right_to_left'):
        if not isinstance(linkCore, Exception):
            absolute = linkCore.startswith('//')
            res = create_v1(targetPageName, basePageName, absolute)
            assert res == linkCore, right_to_left_err_msg_1(1)
            res = create_v2(targetPageName, basePageName, absolute)
            assert res == linkCore, right_to_left_err_msg_1(2)
        else:
            exc = type(linkCore)
            with pytest.raises(exc, message=right_to_left_err_msg_2(1)):
                create_v1(targetPageName, basePageName, False)
            with pytest.raises(exc, message=right_to_left_err_msg_2(2)):
                create_v2(targetPageName, basePageName, False)
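
The parse_columns helper is not shown in this example. A plausible sketch of what the test relies on follows; the real helper lives in the test utilities and may differ. Assumptions: rows separated by '|' on both sides feed both directions, '->' marks resolve-only (left_to_right) rows, '<-' marks create-only (right_to_left) rows, and the sentinels EMPTY_STRING and VALUE_ERROR are mapped to '' and a ValueError instance.

import re
from collections import defaultdict

def parse_columns_sketch(s):
    # Sketch only: parse the three-column test table used above into
    # {'left_to_right': [(linkCore, basePageName, targetPageName), ...],
    #  'right_to_left': [...]} according to the assumptions stated above.
    sentinels = {'EMPTY_STRING': '', 'VALUE_ERROR': ValueError()}
    values = defaultdict(list)
    for line in s.splitlines():
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        cols = re.split(r'\s*(\||->|<-)\s*', line)
        if len(cols) != 5:
            continue  # not a data row
        left, sep1, base, sep2, right = cols
        row = tuple(sentinels.get(col, col) for col in (left, base, right))
        if '<-' not in (sep1, sep2):
            values['left_to_right'].append(row)   # resolve direction
        if '->' not in (sep1, sep2):
            values['right_to_left'].append(row)   # create direction
    return values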