def test_render_auto_link(self):
    url = "http://{0}.{1}.{2}".format(
        self.genRandomString(5),
        self.genRandomString(5),
        self.genRandomString(3))
    token = next(iter(tokenize_span("<{url}>".format(url=url))))
    expected = "[{url}]".format(url=url)
    actual = self.renderer.render(token)
    self.assertEqual(expected, actual)
def test_foot_ref_span(name, source, data_regression):
    # Register a known footnote definition and enable the FootReference
    # span token before tokenizing the source.
    get_parse_context().foot_definitions["a"] = True
    _span_tokens = get_parse_context().span_tokens
    _span_tokens.insert_after(FootReference, CoreTokens)
    data_regression.check(
        serialize_tokens(tokenize_span(source), as_dict=True),
        basename=f"test_foot_ref_span_{name}",
    )
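# A minimal sketch of a parametrized driver for ``test_foot_ref_span`` above,
# which expects ``name`` and ``source`` arguments.  The cases below are
# illustrative assumptions, not the project's actual regression data.
import pytest

@pytest.mark.parametrize(
    "name,source",
    [
        ("basic", "[a]"),
        ("in_paragraph", "text with [a] inside"),
    ],
)
def test_foot_ref_span_sketch(name, source, data_regression):
    # Delegate to the test body defined above.
    test_foot_ref_span(name, source, data_regression)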
def test_inline_code(renderer_mock):
    renderer_mock.render(tokenize_span("`foo`")[0])
    assert renderer_mock.document.pformat() == dedent(
        """\
        <document source="notset">
            <literal>
                foo
        """
    )
def test_role_code(renderer_mock):
    # The double-backtick fence allows a literal backtick inside the role
    # content.
    renderer_mock.render(tokenize_span("{code}`` a=1{`} ``")[0])
    assert renderer_mock.document.pformat() == dedent(
        """\
        <document source="notset">
            <literal classes="code">
                a=1{`}
        """
    )
def test_render_link(self):
    url = "http://{0}.{1}.{2}".format(
        self.genRandomString(5),
        self.genRandomString(5),
        self.genRandomString(3))
    body = self.genRandomString(80, True)
    token = next(
        iter(tokenize_span("[{body}]({url})".format(url=url, body=body))))
    expected = "[{body}|{url}]".format(url=url, body=body)
    actual = self.renderer.render(token)
    self.assertEqual(expected, actual)
def read(cls, lines):
    # Strip each line (dropping trailing newlines and surrounding spaces),
    # concatenate, and tokenize the result into span-level children.
    return cls(
        children=tokenize_span("".join([line.strip() for line in lines])))
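# Hedged usage note for ``read`` above, assuming it is bound as a classmethod
# on a block-token class (``DemoBlock`` is a hypothetical name).  Because
# ``line.strip()`` removes trailing newlines and surrounding spaces, lines
# are concatenated with no separator:
#
#     token = DemoBlock.read(["*hello* \n", "world\n"])
#     # token.children == tokenize_span("*hello*world")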
def test_render_image(self):
    token = next(iter(tokenize_span("![image](foo.jpg)")))
    expected = "!foo.jpg!"
    actual = self.renderer.render(token)
    self.assertEqual(expected, actual)
def textFormatTest(self, inputTemplate, outputTemplate):
    # Helper: render a random string wrapped in ``inputTemplate`` and check
    # the result against the same string wrapped in ``outputTemplate``.
    input = self.genRandomString(80, False)
    token = next(iter(tokenize_span(inputTemplate.format(input))))
    expected = outputTemplate.format(input)
    actual = self.renderer.render(token)
    self.assertEqual(expected, actual)
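# Hedged examples of how the ``textFormatTest`` helper above is typically
# called: each test supplies a Markdown input template and the expected
# output template for the renderer under test.  The JIRA-style pairs below
# are illustrative assumptions, not necessarily the suite's actual cases.
def test_render_strong(self):
    self.textFormatTest("**{}**", "*{}*")

def test_render_emphasis(self):
    self.textFormatTest("*{}*", "_{}_")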
def test_parse(data_regression):
    with GithubWikiRenderer():
        source = "text with [[wiki | target]]"
        data_regression.check(
            serialize_tokens(tokenize_span(source), as_dict=True))
def test_render(file_regression):
    with GithubWikiRenderer() as renderer:
        token = tokenize_span("[[wiki|target]]")[0]
        file_regression.check(renderer.render(token), extension=".html")
def test_parse_with_children(data_regression):
    with GithubWikiRenderer():
        source = "[[*alt*|link]]"
        data_regression.check(
            serialize_tokens(tokenize_span(source), as_dict=True))
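# Hedged usage sketch: beyond these single-token tests, the same renderer
# would normally drive full-document rendering.  ``Document`` is assumed to
# be the block-level document token from the same package as
# ``tokenize_span``:
#
#     with GithubWikiRenderer() as renderer:
#         html = renderer.render(Document(["text with [[wiki|target]]\n"]))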