Example #1
    def test_block(self):
        doc = self._getdoc('abc:: \na')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
                                  [self.TOKENIZER.tokens.default] * 3 +
                                  [self.TOKENIZER.tokens.block] * 4 +
                                  [self.TOKENIZER.tokens.default])
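All of these examples follow the same pattern: build a document from a literal string, run the mode's tokenizer, then hand check_style an expected-style list assembled with list repetition and concatenation, one entry per character of the checked range. The snippet below only illustrates that arithmetic and is not kaa code; plain strings stand in for the token objects as a readability assumption. Here 'abc:: \na' is 8 characters, so the expected list above needs 3 + 4 + 1 = 8 entries.

# Illustrative sketch only; plain strings stand in for the tokenizer's token objects.
text = 'abc:: \na'                  # 8 characters, matching check_style(doc, 0, 8, ...)
expected = ['default'] * 3 + ['block'] * 4 + ['default']
assert len(expected) == len(text)   # one expected entry per character being checked
for ch, style in zip(text, expected):
    print(repr(ch), '->', style)    # shows which style the multipliers assign to each character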
Example #2
    def test_media(self):
        doc = self._getdoc('@media a{b{c:d}}@media e{f{g:h}}')
        doc.mode.run_tokenizer(None)

        tokenizer = cssmode.CSSMode.tokenizer
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 32, [tokenizer.tokens.media] * 9 +
            [tokenizer.MediaCSSTokenizer.tokens.default] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.ruleset] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.propname] * 2 + [
                tokenizer.MediaCSSTokenizer.PropTokenizer.PropValueTokenizer.
                tokens.string
            ] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.terminate_name] *
            1 + [tokenizer.MediaCSSTokenizer.tokens.terminate_media] * 1 +
            [tokenizer.tokens.media] * 9 +
            [tokenizer.MediaCSSTokenizer.tokens.default] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.ruleset] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.propname] * 2 + [
                tokenizer.MediaCSSTokenizer.PropTokenizer.PropValueTokenizer.
                tokens.string
            ] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.terminate_name] *
            1 + [tokenizer.MediaCSSTokenizer.tokens.terminate_media] * 1)
Example #3
    def test_block(self):
        doc = self._getdoc('abc:: \na')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
            [self.TOKENIZER.tokens.default] * 3 +
            [self.TOKENIZER.tokens.block] * 4 +
            [self.TOKENIZER.tokens.default])
Example #4
    def test_jselem(self):
        doc = self._getdoc("<script>if</script>")

        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 19,
                                  [self.TOKENIZER.tokens.scripttag] * 8 +
                                  [self.JSTokenizer.tokens.keyword] * 2 +
                                  [self.TOKENIZER.tokens.closetag] * 9)
Example #5
    def test_jselem(self):
        doc = self._getdoc("<script>if</script>")

        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 19, 
            [self.TOKENIZER.tokens.scripttag] * 8 +
            [self.JSTokenizer.tokens.keyword] * 2 +
            [self.TOKENIZER.tokens.closetag] * 9)
Example #6
    def test_inline_delim(self):

        doc = self._getdoc('abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.default] * 7)

        doc = self._getdoc('*abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.emphasis] * 7)
Example #7
    def test_csselem(self):
        doc = self._getdoc("<style>a{b:c}</style>")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 21,
            [self.TOKENIZER.tokens.styletag] * 7 +
            [self.CSSTokenizer.tokens.default] * 1 +
            [self.CSSTokenizer.tokens.ruleset] * 1 +
            [self.CSSPropTokenizer.tokens.propname] * 2 +
            [self.CSSPropValueTokenizer.tokens.string] * 1 +
            [self.CSSPropTokenizer.tokens.terminate_name] * 1 +
            [self.TOKENIZER.tokens.closetag] * 8)
Example #8
    def test_csselem(self):
        doc = self._getdoc("<style>a{b:c}</style>")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 21, [self.TOKENIZER.tokens.styletag] * 7 +
            [self.CSSTokenizer.tokens.default] * 1 +
            [self.CSSTokenizer.tokens.ruleset] * 1 +
            [self.CSSPropTokenizer.tokens.propname] * 2 +
            [self.CSSPropValueTokenizer.tokens.string] * 1 +
            [self.CSSPropTokenizer.tokens.terminate_name] * 1 +
            [self.TOKENIZER.tokens.closetag] * 8)
Example #9
    def test_inline_delim(self):

        doc = self._getdoc('abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.default] * 7)

        doc = self._getdoc('*abc*abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.emphasis] * 7)
Example #10
    def test_image(self):
        doc = self._getdoc('![link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13,
            [self.TOKENIZER.tokens.link]*13)

        doc = self._getdoc('![link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 12,
            [self.TOKENIZER.tokens.link]*8 + 
            [self.TOKENIZER._LinkTokenizer.tokens.default]*3 + 
            [self.TOKENIZER._LinkTokenizer.tokens.close]*1)
Example #11
    def test_image(self):
        doc = self._getdoc('![link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13,
                                  [self.TOKENIZER.tokens.link] * 13)

        doc = self._getdoc('![link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 12, [self.TOKENIZER.tokens.link] * 8 +
            [self.TOKENIZER._LinkTokenizer.tokens.default] * 3 +
            [self.TOKENIZER._LinkTokenizer.tokens.close] * 1)
Example #12
    def test_no_media(self):
        doc = self._getdoc('sel{abc:def;}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        kaa_testutils.check_style(
            doc, 0, 13,
            [tokenizer.tokens.default] * 3 + [tokenizer.tokens.ruleset] * 1 +
            [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 3 + [
                tokenizer.PropTokenizer.PropValueTokenizer.tokens.
                terminate_value
            ] * 1 + [tokenizer.PropTokenizer.tokens.terminate_name] * 1)
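To make the nested CSS expectations above easier to read, here is how the multipliers in test_no_media line up with the 13 characters of 'sel{abc:def;}'. This is a purely illustrative tally; the strings merely echo the token names used in the test and are not kaa objects.

# Illustrative tally; the strings echo the token names used in test_no_media above.
text = 'sel{abc:def;}'                  # 13 characters, matching check_style(doc, 0, 13, ...)
expected = (['default'] * 3 +           # 'sel'  - the selector
            ['ruleset'] * 1 +           # '{'    - opens the rule set
            ['propname'] * 4 +          # 'abc:' - property name including the colon
            ['string'] * 3 +            # 'def'  - the property value
            ['terminate_value'] * 1 +   # ';'    - ends the value
            ['terminate_name'] * 1)     # '}'    - closes the rule set
assert len(expected) == len(text)       # 3 + 1 + 4 + 3 + 1 + 1 == 13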
Example #13
    def test_no_media(self):
        doc = self._getdoc('sel{abc:def;}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        kaa_testutils.check_style(doc, 0, 13, 
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 3 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.terminate_value] * 1 +
            [tokenizer.PropTokenizer.tokens.terminate_name] * 1
        )
Example #14
    def test_list(self):
        doc = self._getdoc('* abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5, [self.TOKENIZER.tokens.list] * 2 +
                                  [self.TOKENIZER.tokens.default] * 3)

        doc = self._getdoc('1. abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, [self.TOKENIZER.tokens.list] * 3 +
                                  [self.TOKENIZER.tokens.default] * 3)

        doc = self._getdoc('1. abc\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 16,
                                  [self.TOKENIZER.tokens.list] * 3 +
                                  [self.TOKENIZER.tokens.default] * 4 +
                                  [self.TOKENIZER.tokens.list] * 6 +
                                  [self.TOKENIZER.tokens.default] * 3)

        doc = self._getdoc('1. abc\na\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 18,
                                  [self.TOKENIZER.tokens.list] * 3 +
                                  [self.TOKENIZER.tokens.default] * 6 +
                                  [self.TOKENIZER.tokens.code1] * 9)
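One detail worth keeping in mind for the multi-line inputs in test_list (and the similar tests below): the '\n' characters are counted like any other character, which is why '1. abc\n    * def' needs exactly 3 + 4 + 6 + 3 = 16 expected entries. A quick, purely illustrative check, with plain strings in place of the token objects:

# Illustrative check only; strings stand in for the markdown tokenizer's token objects.
text = '1. abc\n    * def'          # 16 characters, including the newline
expected = (['list'] * 3 +          # '1. '
            ['default'] * 4 +       # 'abc' plus the '\n'
            ['list'] * 6 +          # '    * '
            ['default'] * 3)        # 'def'
assert len(text) == 16
assert len(expected) == len(text)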
Example #15
    def test_link(self):
        doc = self._getdoc('[link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 12, 
            [self.TOKENIZER.tokens.link]*12)

        doc = self._getdoc('[link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 11, 
            [self.TOKENIZER.tokens.link]*7 + 
            [self.TOKENIZER._LinkTokenizer.tokens.default]*3+
            [self.TOKENIZER._LinkTokenizer.tokens.close]*1)

        doc = self._getdoc('[link](url "te)xt")')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 19, 
            [self.TOKENIZER.tokens.link]*7 + 
            [self.TOKENIZER._LinkTokenizer.tokens.default]*4+
            [self.TOKENIZER._LinkTokenizer.tokens.desc]*7+
            [self.TOKENIZER._LinkTokenizer.tokens.close]*1)

        doc = self._getdoc('[link]:')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.link]*7)
Example #16
    def test_list(self):
        doc = self._getdoc('* abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5,
            [self.TOKENIZER.tokens.list]*2 +
            [self.TOKENIZER.tokens.default]*3)

        doc = self._getdoc('1. abc')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
            [self.TOKENIZER.tokens.list]*3 +
            [self.TOKENIZER.tokens.default]*3)

        doc = self._getdoc('1. abc\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 16,
            [self.TOKENIZER.tokens.list]*3 +
            [self.TOKENIZER.tokens.default]*4+
            [self.TOKENIZER.tokens.list]*6+
            [self.TOKENIZER.tokens.default]*3)

        doc = self._getdoc('1. abc\na\n    * def')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 18,
            [self.TOKENIZER.tokens.list]*3 +
            [self.TOKENIZER.tokens.default]*6+
            [self.TOKENIZER.tokens.code1]*9)
Example #17
    def test_propvalue(self):
        doc = self._getdoc('sel{abc:.1em;def:ghi}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        kaa_testutils.check_style(
            doc, 0, 21,
            [tokenizer.tokens.default] * 3 + [tokenizer.tokens.ruleset] * 1 +
            [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.number] * 4 + [
                tokenizer.PropTokenizer.PropValueTokenizer.tokens.
                terminate_value
            ] * 1 + [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 3 +
            [tokenizer.PropTokenizer.tokens.terminate_name] * 1)
Example #18
    def test_propvalue(self):
        doc = self._getdoc('sel{abc:.1em;def:ghi}')
        doc.mode.run_tokenizer(None)
        tokenizer = cssmode.CSSMode.tokenizer

        kaa_testutils.check_style(doc, 0, 21, 
            [tokenizer.tokens.default] * 3 +
            [tokenizer.tokens.ruleset] * 1 +
            [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.number] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.terminate_value] * 1 +
            [tokenizer.PropTokenizer.tokens.propname] * 4 +
            [tokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 3 +
            [tokenizer.PropTokenizer.tokens.terminate_name] * 1
        )
Example #19
    def test_highlight(self):
        doc = self._getdoc('<a b=c d="e" f=\'g\'>')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 19, [self.TOKENIZER.tokens.tag] * 2 +
                                  [self.AttrTokenizer.tokens.default] * 1 +
                                  [self.AttrTokenizer.tokens.attr] * 2 +
                                  [self.ValueTokenizer3.tokens.value] * 1 +
                                  [self.AttrTokenizer.tokens.default] * 1 +
                                  [self.AttrTokenizer.tokens.attr] * 3 +
                                  [self.ValueTokenizer2.tokens.value] * 2 +
                                  [self.AttrTokenizer.tokens.default] * 1 +
                                  [self.AttrTokenizer.tokens.attr] * 3 +
                                  [self.ValueTokenizer1.tokens.value] * 2 +
                                  [self.TOKENIZER.tokens.tag] * 1)
Example #20
    def test_highlight(self):
        doc = self._getdoc('<a b=c d="e" f=\'g\'>')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 19,
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 2 +
            [self.ValueTokenizer3.tokens.value] * 1 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 3 +
            [self.ValueTokenizer2.tokens.value] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 3 +
            [self.ValueTokenizer1.tokens.value] * 2 +
            [self.TOKENIZER.tokens.tag] * 1)
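For the attribute tests such as test_highlight, walking through the 19-character input one character at a time also shows where the quotes land: the multipliers group each opening quote with the attr tokens ('d="' is attr * 3) while each closing quote goes with the value tokens ('e"' is value * 2). The tally below is illustrative only; plain strings stand in for the token objects.

# Illustrative tally; string names mirror the tokens used in test_highlight above.
text = '<a b=c d="e" f=\'g\'>'     # 19 characters, matching check_style(doc, 0, 19, ...)
expected = (['tag'] * 2 +           # '<a'
            ['default'] * 1 +       # ' '
            ['attr'] * 2 +          # 'b='
            ['value'] * 1 +         # 'c'   - unquoted value
            ['default'] * 1 +       # ' '
            ['attr'] * 3 +          # 'd="' - opening quote grouped with the attribute
            ['value'] * 2 +         # 'e"'  - closing quote grouped with the value
            ['default'] * 1 +       # ' '
            ['attr'] * 3 +          # "f='"
            ['value'] * 2 +         # "g'"
            ['tag'] * 1)            # '>'
assert len(expected) == len(text) == 19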
Example #21
    def test_jsattr(self):
        doc = self._getdoc("<a ona='if'>")
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 12, [self.TOKENIZER.tokens.tag] * 2 +
                                  [self.AttrTokenizer.tokens.default] * 1 +
                                  [self.AttrTokenizer.tokens.attr] * 5 +
                                  [self.ValueJSTokenizer1.tokens.keyword] * 2 +
                                  [self.AttrTokenizer.tokens.attr] * 1 +
                                  [self.TOKENIZER.tokens.tag] * 1)

        doc = self._getdoc('<a ona="if">')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 12, [self.TOKENIZER.tokens.tag] * 2 +
                                  [self.AttrTokenizer.tokens.default] * 1 +
                                  [self.AttrTokenizer.tokens.attr] * 5 +
                                  [self.ValueJSTokenizer2.tokens.keyword] * 2 +
                                  [self.AttrTokenizer.tokens.attr] * 1 +
                                  [self.TOKENIZER.tokens.tag] * 1)
Example #22
    def test_cssattr(self):
        doc = self._getdoc("<a style='a:b'>")
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(
            doc, 0, 15, [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer1.tokens.propname] * 2 +
            [self.PropValueTokenizer1.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)

        doc = self._getdoc('<a style="a:b">')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(
            doc, 0, 15, [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)

        kaa_testutils.check_style(
            doc, 0, 15, [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 +
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)
Example #23
    def test_cssattr(self):
        doc = self._getdoc("<a style='a:b'>")
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 15, 
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 + 
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer1.tokens.propname] * 2 +
            [self.PropValueTokenizer1.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)


        doc = self._getdoc('<a style="a:b">')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 15, 
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 + 
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)

        kaa_testutils.check_style(doc, 0, 15, 
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 + 
            [self.AttrTokenizer.tokens.attr] * 7 +
            [self.AttrCSSTokenizer2.tokens.propname] * 2 +
            [self.PropValueTokenizer2.tokens.string] * 1 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)
Example #24
    def test_jsattr(self):
        doc = self._getdoc("<a ona='if'>")
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 12, 
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 + 
            [self.AttrTokenizer.tokens.attr] * 5 +
            [self.ValueJSTokenizer1.tokens.keyword] * 2 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)

        doc = self._getdoc('<a ona="if">')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 12, 
            [self.TOKENIZER.tokens.tag] * 2 +
            [self.AttrTokenizer.tokens.default] * 1 + 
            [self.AttrTokenizer.tokens.attr] * 5 +
            [self.ValueJSTokenizer2.tokens.keyword] * 2 +
            [self.AttrTokenizer.tokens.attr] * 1 +
            [self.TOKENIZER.tokens.tag] * 1)
Example #25
    def test_media(self):
        doc = self._getdoc('@media a{b{c:d}}@media e{f{g:h}}')
        doc.mode.run_tokenizer(None)

        tokenizer = cssmode.CSSMode.tokenizer
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 32, 
            [tokenizer.tokens.media] * 9 +
            [tokenizer.MediaCSSTokenizer.tokens.default] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.ruleset] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.propname] * 2 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.terminate_name] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.terminate_media] * 1 +

            [tokenizer.tokens.media] * 9 +
            [tokenizer.MediaCSSTokenizer.tokens.default] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.ruleset] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.propname] * 2 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.PropValueTokenizer.tokens.string] * 1 +
            [tokenizer.MediaCSSTokenizer.PropTokenizer.tokens.terminate_name] * 1 +
            [tokenizer.MediaCSSTokenizer.tokens.terminate_media] * 1
            )
Example #26
    def test_emphasis(self):
        doc = self._getdoc('**text**')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
            [self.TOKENIZER.tokens.strong1]*8)

        doc = self._getdoc('__text__')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
            [self.TOKENIZER.tokens.strong2]*8)

        doc = self._getdoc('*text*')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
            [self.TOKENIZER.tokens.emphasis1]*6)

        doc = self._getdoc('_text_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
            [self.TOKENIZER.tokens.emphasis2]*6)
Example #27
    def test_emphasis(self):
        doc = self._getdoc('**text**')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
                                  [self.TOKENIZER.tokens.strong1] * 8)

        doc = self._getdoc('__text__')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 8,
                                  [self.TOKENIZER.tokens.strong2] * 8)

        doc = self._getdoc('*text*')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [self.TOKENIZER.tokens.emphasis1] * 6)

        doc = self._getdoc('_text_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [self.TOKENIZER.tokens.emphasis2] * 6)
Example #28
    def test_literal(self):
        doc = self._getdoc('`text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, [self.TOKENIZER.tokens.code3] * 6)

        doc = self._getdoc('```\ntext\n```\n')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13,
                                  [self.TOKENIZER.tokens.code2] * 13)

        doc = self._getdoc('` text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.default] * 6 +
                                  [self.TOKENIZER.tokens.code3])

        doc = self._getdoc('    text\na')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 10,
                                  [self.TOKENIZER.tokens.code1] * 8 +
                                  [self.TOKENIZER.tokens.default] * 2)
Example #29
    def test_literal(self):
        doc = self._getdoc('`text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
            [self.TOKENIZER.tokens.code3]*6)

        doc = self._getdoc('```\ntext\n```\n')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 13,
            [self.TOKENIZER.tokens.code2]*13)

        doc = self._getdoc('` text`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
            [self.TOKENIZER.tokens.default]*6 +
            [self.TOKENIZER.tokens.code3])

        doc = self._getdoc('    text\na')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 10,
            [self.TOKENIZER.tokens.code1]*8 +
            [self.TOKENIZER.tokens.default]*2)
Example #30
    def test_link(self):
        doc = self._getdoc('[link][link]')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 12,
                                  [self.TOKENIZER.tokens.link] * 12)

        doc = self._getdoc('[link](url)')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 11, [self.TOKENIZER.tokens.link] * 7 +
            [self.TOKENIZER._LinkTokenizer.tokens.default] * 3 +
            [self.TOKENIZER._LinkTokenizer.tokens.close] * 1)

        doc = self._getdoc('[link](url "te)xt")')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(
            doc, 0, 19, [self.TOKENIZER.tokens.link] * 7 +
            [self.TOKENIZER._LinkTokenizer.tokens.default] * 4 +
            [self.TOKENIZER._LinkTokenizer.tokens.desc] * 7 +
            [self.TOKENIZER._LinkTokenizer.tokens.close] * 1)

        doc = self._getdoc('[link]:')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, [self.TOKENIZER.tokens.link] * 7)
Example #31
    def test_header1(self):
        doc = self._getdoc('abc\n---')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.header1] * 7)
Example #32
    def test_inline(self):

        doc = self._getdoc("**abc**")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.strong] * 7)

        doc = self._getdoc("*abc*")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5,
                                  [self.TOKENIZER.tokens.emphasis] * 5)

        doc = self._getdoc('``abc``')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.literal] * 7)

        doc = self._getdoc('`abc`_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [self.TOKENIZER.tokens.interpreted] * 6)

        doc = self._getdoc('abc_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 4,
                                  [self.TOKENIZER.tokens.reference] * 4)

        doc = self._getdoc('_`abc`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [self.TOKENIZER.tokens.target] * 6)

        doc = self._getdoc('|abc|')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5,
                                  [self.TOKENIZER.tokens.substitution] * 5)

        doc = self._getdoc('[abc]_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6,
                                  [self.TOKENIZER.tokens.citation] * 6)
Example #33
    def test_table(self):
        doc = self._getdoc('+=+\n| |')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
                                  [self.TOKENIZER.tokens.table_border] * 4 +
                                  [self.TOKENIZER.tokens.table_row] * 3)
Example #34
    def test_inline(self):

        doc = self._getdoc("**abc**")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.strong] * 7)

        doc = self._getdoc("*abc*")
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5, 
            [self.TOKENIZER.tokens.emphasis] * 5)

        doc = self._getdoc('``abc``')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7, 
            [self.TOKENIZER.tokens.literal] * 7)

        doc = self._getdoc('`abc`_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, 
            [self.TOKENIZER.tokens.interpreted] * 6)

        doc = self._getdoc('abc_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 4, 
            [self.TOKENIZER.tokens.reference] * 4)

        doc = self._getdoc('_`abc`')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, 
            [self.TOKENIZER.tokens.target] * 6)

        doc = self._getdoc('|abc|')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 5, 
            [self.TOKENIZER.tokens.substitution] * 5)

        doc = self._getdoc('[abc]_')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 6, 
            [self.TOKENIZER.tokens.citation] * 6)
Example #35
    def test_table(self):
        doc = self._getdoc('+=+\n| |')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 7,
            [self.TOKENIZER.tokens.table_border] * 4 +
            [self.TOKENIZER.tokens.table_row] * 3)
Example #36
    def test_hr(self):
        doc = self._getdoc('----')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 4, [self.TOKENIZER.tokens.hr] * 4)
Example #37
    def test_directive(self):
        doc = self._getdoc('.. abc:: 111\n 222\n333')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 21,
                                  [self.TOKENIZER.tokens.directive] * 18 +
                                  [self.TOKENIZER.tokens.default] * 3)
Example #38
    def test_header2(self):
        doc = self._getdoc('# abc')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 5,
                                  [self.TOKENIZER.tokens.header2] * 5)
Example #39
    def test_header2(self):
        doc = self._getdoc('# abc')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 5, 
            [self.TOKENIZER.tokens.header2] * 5)
Example #40
    def test_header(self):
        doc = self._getdoc('--\nab\n---')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 9,
                                  [self.TOKENIZER.tokens.header1] * 9)
Example #41
    def test_hr(self):
        doc = self._getdoc('----')
        doc.mode.run_tokenizer(None)

        kaa_testutils.check_style(doc, 0, 4, 
            [self.TOKENIZER.tokens.hr] * 4)
Example #42
    def test_directive(self):
        doc = self._getdoc('.. abc:: 111\n 222\n333')
        doc.mode.run_tokenizer(None)
        kaa_testutils.check_style(doc, 0, 21,
            [self.TOKENIZER.tokens.directive] * 18 +
            [self.TOKENIZER.tokens.default] * 3)