Example #1
 def test_trailing_empty_space(self):
     with patch('mistletoe.span_token.FootnoteAnchor') as mock:
         tokens = span_token.tokenize_inner('[alt] foo')
         next(tokens)
         mock.assert_called_with('alt')
         next(tokens)
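         # self.mock is presumably a RawText mock created elsewhere (e.g. in setUp); that setup is not shown in this excerpt.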
         self.mock.assert_called_with(' foo')
Example #2
 def __call__(
     self, ctx: Context, arg: str, block: Optional[List[BlockToken]]
 ) -> str:
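     # Assemble the keyword arguments for the wrapped macro function based on
     # what this macro declares it needs (context, argument, children).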
     arguments: Dict[str, Any] = {"ctx": ctx}
     children: Optional[List[Token]] = block
     if self.kind is Kind.INLINE and self.requires_children:
         # Note: macros are executed in a rendering context, so
         # tokenize_inner will have access to all the extra tokens.
         children = tokenize_inner(arg)
         arg = ""
     if self.requires_arg:
         arguments["arg"] = arg
     elif arg:
         raise MacroError(f"unexpected argument {arg!r}")
     if self.requires_children:
         if children is None:
             # This can only happen for block macros. For inline macros, we'd
             # at least tokenize "" which becomes [].
             assert self.kind is Kind.BLOCK
             raise MacroError("macro needs a blockquote")
         arguments["children"] = children
     elif children:
         assert self.kind is Kind.BLOCK
         raise MacroError("macro does not take blockquote")
     return self.function(**arguments)
Example #3
 def test_render_link(self):
     url = 'http://{0}.{1}.{2}'.format(self.genRandomString(5), self.genRandomString(5), self.genRandomString(3))
     body = self.genRandomString(80, True)
     token = next(iter(tokenize_inner('[{body}]({url})'.format(url=url, body=body))))
     expected = '[{body}|{url}]'.format(url=url, body=body)
     actual = self.renderer.render(token)
     self.assertEqual(expected, actual)
Example #4
 def test_span(self):
     raw = 'some <span>more</span> text'
     tokens = tokenize_inner(raw)
     next(tokens)
     content = '<span>more</span>'
     self._test_html_token(next(tokens), html_token.HTMLSpan, content)
     next(tokens)
Example #5
 def test_render_auto_link(self):
     url = 'http://{0}.{1}.{2}'.format(self.genRandomString(5),
                                       self.genRandomString(5),
                                       self.genRandomString(3))
     token = next(tokenize_inner('<{url}>'.format(url=url)))
     expected = '[{url}]'.format(url=url)
     actual = self.renderer.render(token)
     self.assertEqual(expected, actual)
Example #6
 def test_span(self, MockRawText):
     raw = 'some <span>more</span> text'
     tokens = tokenize_inner(raw)
     next(tokens)
     MockRawText.assert_called_with('some ')
     content = '<span>more</span>'
     self._test_html_token(next(tokens), html_token.HTMLSpan, content)
     next(tokens)
     MockRawText.assert_called_with(' text')
Example #7
 def test_parse(self, MockRawText):
     tokens = tokenize_inner('text with [[wiki | target]]')
     next(tokens)
     MockRawText.assert_called_with('text with ')
     token = next(tokens)
     self.assertIsInstance(token, GithubWiki)
     self.assertEqual(token.target, 'target')
     next(token.children)
     MockRawText.assert_called_with('wiki')
Example #8
 def test_parse(self):
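     # Temporarily swap the real RawText (the last entry in _token_types) for a
     # mock so the calls made during tokenization can be asserted on; the
     # original class is restored in the finally block.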
     MockRawText = mock.Mock(autospec='mistletoe.span_token.RawText')
     RawText = _token_types.pop()
     _token_types.append(MockRawText)
     try:
         tokens = tokenize_inner('text with [[wiki | target]]')
         token = tokens[1]
         self.assertIsInstance(token, GithubWiki)
         self.assertEqual(token.target, 'target')
         MockRawText.assert_has_calls([mock.call('text with '), mock.call('wiki')])
     finally:
         _token_types[-1] = RawText
Example #9
 def test_parse(self):
     MockRawText = mock.Mock(autospec='mistletoe.span_token.RawText')
     RawText = _token_types.pop()
     _token_types.append(MockRawText)
     try:
         tokens = tokenize_inner('text with [[wiki | target]]')
         next(tokens)
         MockRawText.assert_called_with('text with ')
         token = next(tokens)
         self.assertIsInstance(token, GithubWiki)
         self.assertEqual(token.target, 'target')
         next(iter(token.children))
         MockRawText.assert_called_with('wiki')
     finally:
         _token_types[-1] = RawText
Example #10
 def test_parse_in_text(self):
     tokens = iter(span_token.tokenize_inner('some \\*text*'))  # input contains a literal backslash-escaped '*'
     self._test_token(next(tokens), 'some ', children=False)
     self._test_token(next(tokens), '*')
     self._test_token(next(tokens), 'text*', children=False)
Example #11
 def test_render_image(self):
     token = next(iter(tokenize_inner('![image](foo.jpg)')))
     expected = '!foo.jpg!'
     actual = self.renderer.render(token)
     self.assertEqual(expected, actual)
Example #12
 def textFormatTest(self, inputTemplate, outputTemplate):
     input = self.genRandomString(80, False)
     token = next(iter(tokenize_inner(inputTemplate.format(input))))
     expected = outputTemplate.format(input)
     actual = self.renderer.render(token)
     self.assertEqual(expected, actual)
Example #13
def tokenize(content):
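    # Tokenize the inline content and drop any Whitespace tokens.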
    tokens = span_token.tokenize_inner(content)
    return tuple(
        filter(lambda token: not isinstance(token, Whitespace), tokens))
Example #14
 def test_render(self):
     token = next(tokenize_inner('[[wiki|target]]'))
     output = '<a href="target">wiki</a>'
     self.assertEqual(self.renderer.render(token), output)
Example #15
 def __init__(self, lines):
     self.children = span_token.tokenize_inner(''.join(
         [line.strip() for line in lines]))
Example #16
 def test_autolink(self):
     from mistletoe.span_token import AutoLink
     self.assertIsInstance(next(tokenize_inner('<autolink>')), AutoLink)
Example #17
 def test_parse_multi_links(self):
     tokens = iter(span_token.tokenize_inner('[n1](t1) & [n2](t2)'))
     self._test_token(next(tokens), 'n1', target='t1')
     self._test_token(next(tokens), ' & ', children=False)
     self._test_token(next(tokens), 'n2', target='t2')
Example #18
 def _test_parse(self, token_cls, raw, arg, **kwargs):
     token = next(iter(span_token.tokenize_inner(raw)))
     self.assertIsInstance(token, token_cls)
     self._test_token(token, arg, **kwargs)
Example #19
 def test_parse(self):
     token, = span_token.tokenize_inner('  \n')
     self.assertIsInstance(token, span_token.LineBreak)
Example #20
 def test_inline_code(self):
     from mistletoe.span_token import tokenize_inner
     rendered = self.renderer.render(tokenize_inner('`foo`')[0])
     self.assertEqual(rendered, '<code>foo</code>')
Example #21
 def __init__(self, lines):
     content = ''.join(lines).replace('\n', ' ').strip()
     self.children = span_token.tokenize_inner(content)
Example #22
 def test_span(self):
     token = next(tokenize_inner('$ 1 + 2 = 3 $'))
     self.assertIsInstance(token, Math)
     self.assertEqual(token.content, '$ 1 + 2 = 3 $')
Example #23
 def test_contains(self):
     token = next(
         iter(span_token.tokenize_inner('**with some *emphasis* text**')))
     self.assertTrue('text' in token)
     self.assertTrue('emphasis' in token)
     self.assertFalse('foo' in token)
Example #24
 def test_span_attrs(self):
     raw = '<span class="foo">more</span>'
     token = next(tokenize_inner(raw))
     content = '<span class="foo">more</span>'
     self._test_html_token(token, html_token.HTMLSpan, content)
Example #25
 def test_parse_multiple(self):
     tokens = iter(span_token.tokenize_inner('~~one~~ ~~two~~'))
     self._test_token(next(tokens), 'one')
     self._test_token(next(tokens), 'two')
Example #26
 def test_empty_span(self):
     raw = '<span></span>'
     token = next(tokenize_inner(raw))
     content = '<span></span>'
     self._test_html_token(token, html_token.HTMLSpan, content)
Example #27
 def test_parse_children(self):
     token = next(iter(span_token.tokenize_inner('[![alt](src)](target)')))
     child = next(iter(token.children))
     self._test_token(child, 'alt', src='src')
Example #28
 def test_self_closing_span(self):
     raw = '<span />'
     token = next(tokenize_inner(raw))
     content = '<span />'
     self._test_html_token(token, html_token.HTMLSpan, content)