示例#1
0
文件: test_rst.py 项目: smorin/kaa
 def test_block(self):
     """The run after '::' up to the newline is styled as block tokens."""
     t = self.TOKENIZER.tokens
     doc = self._getdoc('abc:: \na')
     doc.mode.run_tokenizer(None)
     expected = [t.default] * 3 + [t.block] * 4 + [t.default]
     kaa_testutils.check_style(doc, 0, 8, expected)
示例#2
0
文件: test_css.py 项目: smorin/kaa
    def test_media(self):
        """Two consecutive @media blocks each yield the same token sequence.

        The document '@media a{b{c:d}}@media e{f{g:h}}' is 32 characters;
        both halves produce an identical 16-token style sequence, so the
        expected list is built once and repeated.
        """
        doc = self._getdoc('@media a{b{c:d}}@media e{f{g:h}}')
        tokenizer = cssmode.CSSMode.tokenizer
        # Tokenize once; the original ran the tokenizer twice redundantly.
        doc.mode.run_tokenizer(None)

        media = tokenizer.MediaCSSTokenizer
        prop = media.PropTokenizer
        one_block = (
            [tokenizer.tokens.media] * 9 +
            [media.tokens.default] * 1 +
            [media.tokens.ruleset] * 1 +
            [prop.tokens.propname] * 2 +
            [prop.PropValueTokenizer.tokens.string] * 1 +
            [prop.tokens.terminate_name] * 1 +
            [media.tokens.terminate_media] * 1)
        kaa_testutils.check_style(doc, 0, 32, one_block * 2)
示例#3
0
文件: test_rst.py 项目: hirokiky/kaa
 def test_block(self):
     """Text following the '::' marker is highlighted as a literal block."""
     doc = self._getdoc('abc:: \na')
     doc.mode.run_tokenizer(None)
     tok = self.TOKENIZER.tokens
     kaa_testutils.check_style(
         doc, 0, 8,
         [tok.default] * 3 + [tok.block] * 4 + [tok.default])
示例#4
0
    def test_jselem(self):
        """The body of a <script> element is tokenized as JavaScript."""
        doc = self._getdoc("<script>if</script>")

        doc.mode.run_tokenizer(None)
        expected = ([self.TOKENIZER.tokens.scripttag] * 8 +
                    [self.JSTokenizer.tokens.keyword] * 2 +
                    [self.TOKENIZER.tokens.closetag] * 9)
        kaa_testutils.check_style(doc, 0, 19, expected)
示例#5
0
    def test_jselem(self):
        """'if' inside <script>...</script> gets the JS keyword style."""
        doc = self._getdoc("<script>if</script>")

        doc.mode.run_tokenizer(None)
        styles = [self.TOKENIZER.tokens.scripttag] * 8
        styles += [self.JSTokenizer.tokens.keyword] * 2
        styles += [self.TOKENIZER.tokens.closetag] * 9
        kaa_testutils.check_style(doc, 0, 19, styles)
示例#6
0
文件: test_rst.py 项目: smorin/kaa
    def test_inline_delim(self):
        """A mid-word '*' does not open emphasis; a leading '*' does."""
        tokens = self.TOKENIZER.tokens

        doc = self._getdoc('abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [tokens.default] * 7)

        doc = self._getdoc('*abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [tokens.emphasis] * 7)
示例#7
0
 def test_csselem(self):
     """The body of a <style> element is tokenized as CSS."""
     doc = self._getdoc("<style>a{b:c}</style>")
     doc.mode.run_tokenizer(None)
     expected = (
         [self.TOKENIZER.tokens.styletag] * 7 +
         [self.CSSTokenizer.tokens.default] * 1 +
         [self.CSSTokenizer.tokens.ruleset] * 1 +
         [self.CSSPropTokenizer.tokens.propname] * 2 +
         [self.CSSPropValueTokenizer.tokens.string] * 1 +
         [self.CSSPropTokenizer.tokens.terminate_name] * 1 +
         [self.TOKENIZER.tokens.closetag] * 8)
     kaa_testutils.check_style(doc, 0, 21, expected)
示例#8
0
 def test_csselem(self):
     """CSS rule inside <style>...</style> is styled by the CSS tokenizers."""
     doc = self._getdoc("<style>a{b:c}</style>")
     doc.mode.run_tokenizer(None)
     css = self.CSSTokenizer.tokens
     prop = self.CSSPropTokenizer.tokens
     kaa_testutils.check_style(
         doc, 0, 21,
         [self.TOKENIZER.tokens.styletag] * 7 +
         [css.default] * 1 +
         [css.ruleset] * 1 +
         [prop.propname] * 2 +
         [self.CSSPropValueTokenizer.tokens.string] * 1 +
         [prop.terminate_name] * 1 +
         [self.TOKENIZER.tokens.closetag] * 8)
示例#9
0
文件: test_rst.py 项目: hirokiky/kaa
    def test_inline_delim(self):
        """'*' only starts emphasis at a word boundary."""
        default = self.TOKENIZER.tokens.default
        emphasis = self.TOKENIZER.tokens.emphasis

        doc = self._getdoc('abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [default] * 7)

        doc = self._getdoc('*abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [emphasis] * 7)
示例#10
0
    def test_image(self):
        """Reference-form image is all link; inline form has a URL part."""
        link = self.TOKENIZER.tokens.link
        ltok = self.TOKENIZER._LinkTokenizer.tokens

        doc = self._getdoc('![link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13, [link] * 13)

        doc = self._getdoc('![link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 12,
            [link] * 8 + [ltok.default] * 3 + [ltok.close] * 1)
示例#11
0
    def test_image(self):
        """Image syntax styling for reference and inline-URL forms."""
        tokens = self.TOKENIZER.tokens
        link_tokens = self.TOKENIZER._LinkTokenizer.tokens

        doc = self._getdoc('![link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13, [tokens.link] * 13)

        doc = self._getdoc('![link](url)')
        doc.mode.run_tokenizer(None)
        expected = ([tokens.link] * 8 +
                    [link_tokens.default] * 3 +
                    [link_tokens.close] * 1)
        kaa_testutils.check_style(doc, 0, 12, expected)
示例#12
0
文件: test_css.py 项目: smorin/kaa
    def test_no_media(self):
        """'sel{abc:def;}' without @media: plain ruleset token sequence."""
        doc = self._getdoc('sel{abc:def;}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer
        prop = tokenizer.PropTokenizer

        expected = (
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [prop.tokens.propname] * 4 +
            [prop.PropValueTokenizer.tokens.string] * 3 +
            [prop.PropValueTokenizer.tokens.terminate_value] * 1 +
            [prop.tokens.terminate_name] * 1)
        kaa_testutils.check_style(doc, 0, 13, expected)
示例#13
0
文件: test_css.py 项目: kaaedit/kaa
    def test_no_media(self):
        """A top-level ruleset (no @media) styles selector, name, and value."""
        doc = self._getdoc('sel{abc:def;}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        prop_tokens = tokenizer.PropTokenizer.tokens
        value_tokens = tokenizer.PropTokenizer.PropValueTokenizer.tokens
        kaa_testutils.check_style(
            doc, 0, 13,
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [prop_tokens.propname] * 4 +
            [value_tokens.string] * 3 +
            [value_tokens.terminate_value] * 1 +
            [prop_tokens.terminate_name] * 1)
示例#14
0
    def test_list(self):
        """List markers ('*', '1.') are styled as list tokens."""
        tokens = self.TOKENIZER.tokens

        doc = self._getdoc('* abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5,
                                  [tokens.list] * 2 + [tokens.default] * 3)

        doc = self._getdoc('1. abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [tokens.list] * 3 + [tokens.default] * 3)

        doc = self._getdoc('1. abc\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 16,
                                  [tokens.list] * 3 + [tokens.default] * 4 +
                                  [tokens.list] * 6 + [tokens.default] * 3)

        # After an unindented continuation line, the indented run is code1.
        doc = self._getdoc('1. abc\na\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 18,
                                  [tokens.list] * 3 + [tokens.default] * 6 +
                                  [tokens.code1] * 9)
示例#15
0
    def test_link(self):
        """Link forms: reference, inline URL, titled URL, and definition."""
        link = self.TOKENIZER.tokens.link
        ltok = self.TOKENIZER._LinkTokenizer.tokens

        doc = self._getdoc('[link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 12, [link] * 12)

        doc = self._getdoc('[link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 11,
            [link] * 7 + [ltok.default] * 3 + [ltok.close] * 1)

        doc = self._getdoc('[link](url "te)xt")')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 19,
            [link] * 7 + [ltok.default] * 4 + [ltok.desc] * 7 +
            [ltok.close] * 1)

        doc = self._getdoc('[link]:')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [link] * 7)
示例#16
0
    def test_list(self):
        """Bulleted and numbered list markers style as list tokens."""
        tokens = self.TOKENIZER.tokens
        cases = [
            ('* abc', 5,
             [tokens.list] * 2 + [tokens.default] * 3),
            ('1. abc', 6,
             [tokens.list] * 3 + [tokens.default] * 3),
            ('1. abc\n    * def', 16,
             [tokens.list] * 3 + [tokens.default] * 4 +
             [tokens.list] * 6 + [tokens.default] * 3),
            ('1. abc\na\n    * def', 18,
             [tokens.list] * 3 + [tokens.default] * 6 +
             [tokens.code1] * 9),
        ]
        for text, length, expected in cases:
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(doc, 0, length, expected)
示例#17
0
文件: test_css.py 项目: smorin/kaa
    def test_propvalue(self):
        """'sel{abc:.1em;def:ghi}': number and string property values."""
        doc = self._getdoc('sel{abc:.1em;def:ghi}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer
        prop = tokenizer.PropTokenizer
        value = prop.PropValueTokenizer

        expected = (
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [prop.tokens.propname] * 4 +
            [value.tokens.number] * 4 +
            [value.tokens.terminate_value] * 1 +
            [prop.tokens.propname] * 4 +
            [value.tokens.string] * 3 +
            [prop.tokens.terminate_name] * 1)
        kaa_testutils.check_style(doc, 0, 21, expected)
示例#18
0
文件: test_css.py 项目: kaaedit/kaa
    def test_propvalue(self):
        """Numeric ('.1em') and string ('ghi') values get distinct tokens."""
        doc = self._getdoc('sel{abc:.1em;def:ghi}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        prop_tokens = tokenizer.PropTokenizer.tokens
        value_tokens = tokenizer.PropTokenizer.PropValueTokenizer.tokens
        kaa_testutils.check_style(
            doc, 0, 21,
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [prop_tokens.propname] * 4 +
            [value_tokens.number] * 4 +
            [value_tokens.terminate_value] * 1 +
            [prop_tokens.propname] * 4 +
            [value_tokens.string] * 3 +
            [prop_tokens.terminate_name] * 1)
示例#19
0
    def test_highlight(self):
        """Unquoted, double-quoted, and single-quoted attribute values."""
        doc = self._getdoc('<a b=c d="e" f=\'g\'>')
        doc.mode.run_tokenizer(None)

        attr = self.AttrTokenizer.tokens
        tag = self.TOKENIZER.tokens.tag
        expected = (
            [tag] * 2 +
            [attr.default] * 1 +
            [attr.attr] * 2 +
            [self.ValueTokenizer3.tokens.value] * 1 +
            [attr.default] * 1 +
            [attr.attr] * 3 +
            [self.ValueTokenizer2.tokens.value] * 2 +
            [attr.default] * 1 +
            [attr.attr] * 3 +
            [self.ValueTokenizer1.tokens.value] * 2 +
            [tag] * 1)
        kaa_testutils.check_style(doc, 0, 19, expected)
示例#20
0
 def test_highlight(self):
     """Each quoting style for attribute values uses its own tokenizer."""
     doc = self._getdoc('<a b=c d="e" f=\'g\'>')
     doc.mode.run_tokenizer(None)

     attr = self.AttrTokenizer.tokens
     tag = self.TOKENIZER.tokens.tag
     kaa_testutils.check_style(
         doc, 0, 19,
         [tag] * 2 +
         [attr.default] * 1 +
         [attr.attr] * 2 +
         [self.ValueTokenizer3.tokens.value] * 1 +
         [attr.default] * 1 +
         [attr.attr] * 3 +
         [self.ValueTokenizer2.tokens.value] * 2 +
         [attr.default] * 1 +
         [attr.attr] * 3 +
         [self.ValueTokenizer1.tokens.value] * 2 +
         [tag] * 1)
示例#21
0
    def test_jsattr(self):
        """'ona' handler attribute values are tokenized as JS keywords."""
        attr = self.AttrTokenizer.tokens
        tag = self.TOKENIZER.tokens.tag

        doc = self._getdoc("<a ona='if'>")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 12,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 5 +
            [self.ValueJSTokenizer1.tokens.keyword] * 2 +
            [attr.attr] * 1 + [tag] * 1)

        doc = self._getdoc('<a ona="if">')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 12,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 5 +
            [self.ValueJSTokenizer2.tokens.keyword] * 2 +
            [attr.attr] * 1 + [tag] * 1)
示例#22
0
    def test_cssattr(self):
        """style='...' attribute contents are tokenized as CSS.

        Covers single-quoted and double-quoted values; each produces the
        same token shape via its quote-specific CSS tokenizer.
        """
        attr = self.AttrTokenizer.tokens
        tag = self.TOKENIZER.tokens.tag

        doc = self._getdoc("<a style='a:b'>")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 15,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 7 +
            [self.AttrCSSTokenizer1.tokens.propname] * 2 +
            [self.PropValueTokenizer1.tokens.string] * 1 +
            [attr.attr] * 1 + [tag] * 1)

        doc = self._getdoc('<a style="a:b">')
        doc.mode.run_tokenizer(None)
        # The original repeated this identical check twice (copy-paste);
        # one assertion is equivalent.
        kaa_testutils.check_style(
            doc, 0, 15,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [attr.attr] * 1 + [tag] * 1)
示例#23
0
    def test_cssattr(self):
        """CSS inside a style attribute is highlighted per quote style.

        Single- and double-quoted values route through their respective
        attribute-CSS tokenizers but produce the same token layout.
        """
        attr = self.AttrTokenizer.tokens
        tag = self.TOKENIZER.tokens.tag

        doc = self._getdoc("<a style='a:b'>")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 15,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 7 +
            [self.AttrCSSTokenizer1.tokens.propname] * 2 +
            [self.PropValueTokenizer1.tokens.string] * 1 +
            [attr.attr] * 1 + [tag] * 1)

        doc = self._getdoc('<a style="a:b">')
        doc.mode.run_tokenizer(None)
        # Dropped the verbatim duplicate of this assertion that followed
        # in the original (copy-paste artifact).
        kaa_testutils.check_style(
            doc, 0, 15,
            [tag] * 2 + [attr.default] * 1 + [attr.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [attr.attr] * 1 + [tag] * 1)
示例#24
0
    def test_jsattr(self):
        """JS inside an event-handler attribute is keyword-highlighted."""
        attr = self.AttrTokenizer.tokens
        tag = self.TOKENIZER.tokens.tag

        doc = self._getdoc("<a ona='if'>")
        doc.mode.run_tokenizer(None)
        single = ([tag] * 2 + [attr.default] * 1 + [attr.attr] * 5 +
                  [self.ValueJSTokenizer1.tokens.keyword] * 2 +
                  [attr.attr] * 1 + [tag] * 1)
        kaa_testutils.check_style(doc, 0, 12, single)

        doc = self._getdoc('<a ona="if">')
        doc.mode.run_tokenizer(None)
        double = ([tag] * 2 + [attr.default] * 1 + [attr.attr] * 5 +
                  [self.ValueJSTokenizer2.tokens.keyword] * 2 +
                  [attr.attr] * 1 + [tag] * 1)
        kaa_testutils.check_style(doc, 0, 12, double)
示例#25
0
文件: test_css.py 项目: kaaedit/kaa
    def test_media(self):
        """Back-to-back @media blocks repeat the same token sequence.

        '@media a{b{c:d}}@media e{f{g:h}}' is 32 characters; each half
        yields an identical 16-token style sequence.
        """
        doc = self._getdoc('@media a{b{c:d}}@media e{f{g:h}}')

        tokenizer = cssmode.CSSMode.tokenizer
        # Run the tokenizer once (the original invoked it twice).
        doc.mode.run_tokenizer(None)

        media_css = tokenizer.MediaCSSTokenizer
        prop = media_css.PropTokenizer
        half = (
            [tokenizer.tokens.media] * 9 +
            [media_css.tokens.default] * 1 +
            [media_css.tokens.ruleset] * 1 +
            [prop.tokens.propname] * 2 +
            [prop.PropValueTokenizer.tokens.string] * 1 +
            [prop.tokens.terminate_name] * 1 +
            [media_css.tokens.terminate_media] * 1)
        kaa_testutils.check_style(doc, 0, 32, half + half)
示例#26
0
    def test_emphasis(self):
        """**/__ style as strong1/strong2; */_ as emphasis1/emphasis2."""
        tokens = self.TOKENIZER.tokens
        cases = [
            ('**text**', 8, tokens.strong1),
            ('__text__', 8, tokens.strong2),
            ('*text*', 6, tokens.emphasis1),
            ('_text_', 6, tokens.emphasis2),
        ]
        for text, length, token in cases:
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(doc, 0, length, [token] * length)
示例#27
0
    def test_emphasis(self):
        """Each emphasis delimiter pair styles its whole span uniformly."""
        tokens = self.TOKENIZER.tokens

        doc = self._getdoc('**text**')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8, [tokens.strong1] * 8)

        doc = self._getdoc('__text__')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8, [tokens.strong2] * 8)

        doc = self._getdoc('*text*')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, [tokens.emphasis1] * 6)

        doc = self._getdoc('_text_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, [tokens.emphasis2] * 6)
示例#28
0
    def test_literal(self):
        """Code spans: inline backtick, fenced block, and 4-space indent."""
        tokens = self.TOKENIZER.tokens

        doc = self._getdoc('`text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, [tokens.code3] * 6)

        doc = self._getdoc('```\ntext\n```\n')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13, [tokens.code2] * 13)

        # A space right after the backtick leaves all but the last
        # character as default text.
        doc = self._getdoc('` text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [tokens.default] * 6 + [tokens.code3])

        doc = self._getdoc('    text\na')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 10,
                                  [tokens.code1] * 8 + [tokens.default] * 2)
示例#29
0
    def test_literal(self):
        """Literal/code spans in each syntactic form get code tokens."""
        tokens = self.TOKENIZER.tokens
        cases = [
            ('`text`', 6, [tokens.code3] * 6),
            ('```\ntext\n```\n', 13, [tokens.code2] * 13),
            ('` text`', 7, [tokens.default] * 6 + [tokens.code3]),
            ('    text\na', 10, [tokens.code1] * 8 + [tokens.default] * 2),
        ]
        for text, length, expected in cases:
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(doc, 0, length, expected)
示例#30
0
    def test_link(self):
        """Link syntaxes: reference, inline URL, titled URL, definition."""
        link = self.TOKENIZER.tokens.link
        sub = self.TOKENIZER._LinkTokenizer.tokens
        cases = [
            ('[link][link]', 12, [link] * 12),
            ('[link](url)', 11,
             [link] * 7 + [sub.default] * 3 + [sub.close] * 1),
            ('[link](url "te)xt")', 19,
             [link] * 7 + [sub.default] * 4 + [sub.desc] * 7 +
             [sub.close] * 1),
            ('[link]:', 7, [link] * 7),
        ]
        for text, length, expected in cases:
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(doc, 0, length, expected)
示例#31
0
    def test_header1(self):
        """'abc' underlined with '---' is styled entirely as header1."""
        doc = self._getdoc('abc\n---')
        doc.mode.run_tokenizer(None)

        header1 = self.TOKENIZER.tokens.header1
        kaa_testutils.check_style(doc, 0, 7, [header1] * 7)
示例#32
0
文件: test_rst.py 项目: smorin/kaa
    def test_inline(self):
        """Each inline markup form styles its whole span with one token."""
        tokens = self.TOKENIZER.tokens
        cases = [
            ("**abc**", tokens.strong),
            ("*abc*", tokens.emphasis),
            ('``abc``', tokens.literal),
            ('`abc`_', tokens.interpreted),
            ('abc_', tokens.reference),
            ('_`abc`', tokens.target),
            ('|abc|', tokens.substitution),
            ('[abc]_', tokens.citation),
        ]
        for text, token in cases:
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(doc, 0, len(text),
                                      [token] * len(text))
示例#33
0
文件: test_rst.py 项目: smorin/kaa
 def test_table(self):
     """'+=+' line styles as table_border; '| |' line as table_row."""
     tokens = self.TOKENIZER.tokens
     doc = self._getdoc('+=+\n| |')
     doc.mode.run_tokenizer(None)
     expected = [tokens.table_border] * 4 + [tokens.table_row] * 3
     kaa_testutils.check_style(doc, 0, 7, expected)
示例#34
0
文件: test_rst.py 项目: hirokiky/kaa
    def test_inline(self):
        """Inline markup forms each style their full character span."""
        tokens = self.TOKENIZER.tokens

        for text, token in (
                ("**abc**", tokens.strong),
                ("*abc*", tokens.emphasis),
                ('``abc``', tokens.literal),
                ('`abc`_', tokens.interpreted),
                ('abc_', tokens.reference),
                ('_`abc`', tokens.target),
                ('|abc|', tokens.substitution),
                ('[abc]_', tokens.citation)):
            doc = self._getdoc(text)
            doc.mode.run_tokenizer(None)
            kaa_testutils.check_style(
                doc, 0, len(text), [token] * len(text))
示例#35
0
文件: test_rst.py 项目: hirokiky/kaa
 def test_table(self):
     """Table borders and row separators get distinct table tokens."""
     doc = self._getdoc('+=+\n| |')
     doc.mode.run_tokenizer(None)
     t = self.TOKENIZER.tokens
     kaa_testutils.check_style(
         doc, 0, 7, [t.table_border] * 4 + [t.table_row] * 3)
示例#36
0
    def test_hr(self):
        """A '----' line is styled entirely as hr tokens."""
        doc = self._getdoc('----')
        doc.mode.run_tokenizer(None)

        hr = self.TOKENIZER.tokens.hr
        kaa_testutils.check_style(doc, 0, 4, [hr] * 4)
示例#37
0
文件: test_rst.py 项目: smorin/kaa
 def test_directive(self):
     """'.. abc::' directive covers its indented body, not the line after."""
     tokens = self.TOKENIZER.tokens
     doc = self._getdoc('.. abc:: 111\n 222\n333')
     doc.mode.run_tokenizer(None)
     expected = [tokens.directive] * 18 + [tokens.default] * 3
     kaa_testutils.check_style(doc, 0, 21, expected)
示例#38
0
    def test_header2(self):
        """'# abc' is styled entirely as header2 tokens."""
        doc = self._getdoc('# abc')
        doc.mode.run_tokenizer(None)

        header2 = self.TOKENIZER.tokens.header2
        kaa_testutils.check_style(doc, 0, 5, [header2] * 5)
示例#39
0
    def test_header2(self):
        """A '#'-prefixed line styles all five characters as header2."""
        doc = self._getdoc('# abc')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(
            doc, 0, 5, [self.TOKENIZER.tokens.header2] * 5)
示例#40
0
文件: test_rst.py 项目: smorin/kaa
 def test_header(self):
     """'ab' with over- and underline is styled entirely as header1."""
     doc = self._getdoc('--\nab\n---')
     doc.mode.run_tokenizer(None)
     header1 = self.TOKENIZER.tokens.header1
     kaa_testutils.check_style(doc, 0, 9, [header1] * 9)
示例#41
0
    def test_hr(self):
        """All four characters of '----' are styled as hr."""
        doc = self._getdoc('----')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(
            doc, 0, 4, [self.TOKENIZER.tokens.hr] * 4)
示例#42
0
文件: test_rst.py 项目: hirokiky/kaa
 def test_directive(self):
     """The directive body styles as directive; trailing text as default."""
     doc = self._getdoc('.. abc:: 111\n 222\n333')
     doc.mode.run_tokenizer(None)
     t = self.TOKENIZER.tokens
     kaa_testutils.check_style(
         doc, 0, 21, [t.directive] * 18 + [t.default] * 3)