def test_title(siteroot: SiteRoot) -> None:
    """build_title: explicit ``title:`` metadata wins; otherwise the
    configured fallback decides between the stem, a truncated abstract,
    or the first header."""
    # Explicit title in the metadata block.
    src1 = """
title: Title text
hi"""
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", src1)])
    assert ctx.content.build_title(ctx) == "Title text"

    # No title metadata: source has body text and one <h1>.
    src2 = """<span>01234 567890 abcddefg</span>
<h1>header</h1>
"""
    # Default fallback: the filename stem ("doc").
    (ctx,) = create_contexts(
        siteroot,
        srcs=[("doc.html", src2)],
    )
    assert ctx.content.build_title(ctx) == "doc"

    # Abstract fallback, truncated to title_abstract_len characters.
    (ctx,) = create_contexts(
        siteroot,
        srcs=[("doc.html", src2)],
        config={"title_fallback": "abstract", "title_abstract_len": 6},
    )
    assert ctx.content.build_title(ctx) == "01234 5"

    # Header fallback: first header's text.
    (ctx,) = create_contexts(
        siteroot,
        srcs=[("doc.html", src2)],
        config={"title_fallback": "header"},
    )
    assert ctx.content.build_title(ctx) == "header"
def test_headers(siteroot: SiteRoot) -> None:
    """page.headers / page.header_anchors expose (id, tag, text) tuples,
    with duplicate header text disambiguated by a numeric suffix."""
    src1 = """
{% for id, tag, text in page.headers %}
[{{id}}, {{tag}}, {{text}}]
{% endfor %}
{% for id, tag, text in page.header_anchors %}
[{{id}}, {{tag}}, {{text}}]
{% endfor %}
<h1>text</h1>
<h1>text</h1>
"""
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", src1)])
    proxy = context.ContentProxy(ctx, ctx.content)
    html = proxy.html
    # Both identical <h1>s get ids; the second gets a "_1" suffix.
    assert "[h_doc_html_text, h1, text]" in html
    assert "[h_doc_html_text_1, h1, text]" in html

    # The generated ids are attached to the actual header elements.
    soup = BeautifulSoup(html, "html.parser")
    assert soup.select("#h_doc_html_text")[0].text == "text"
    assert soup.select("#h_doc_html_text_1")[0].text == "text"
def test_path_to(siteroot: SiteRoot) -> None:
    """path_to resolves absolute and relative site paths, appends
    fragments, and honours abs_path / content.use_abs_path."""
    ctx1, ctx2, ctx3 = create_contexts(
        siteroot,
        srcs=[
            ("a/b/c/doc1.html", ""),
            ("a/b/c/doc2.html", ""),
            ("a/b/d/doc3.html", ""),
        ],
    )
    proxy = context.ContentProxy(ctx1, ctx1.content)

    # Sibling file: just the filename.
    assert proxy.path_to("/a/b/c/doc2.html") == "doc2.html"

    # File in a sibling directory: relative path.
    assert proxy.path_to("/a/b/d/doc3.html") == "../d/doc3.html"

    # Fragment is appended after '#'.
    assert (
        proxy.path_to("../d/doc3.html", fragment="fragment1")
        == "../d/doc3.html#fragment1"
    )

    # Explicit abs_path=True yields a full URL.
    assert (
        proxy.path_to("../d/doc3.html", abs_path=True)
        == "http://localhost:8888/a/b/d/doc3.html"
    )

    # use_abs_path on the content forces full URLs too.
    ctx1.content.use_abs_path = True
    assert (
        proxy.path_to("../d/doc3.html")
        == "http://localhost:8888/a/b/d/doc3.html"
    )
def test_link_xref(siteroot: SiteRoot) -> None:
    """link_to with a fragment produces an <a> whose href targets the
    other page's header id and whose text is that header's text."""
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc1.html",
                """
<h1>doc1-header1</h1>
{{page.link_to("doc2.html", fragment="h_doc2_html_doc2_header1")}}
""",
            ),
            (
                "doc2.html",
                """<h1>doc2-header1</h1>
{{page.link_to("doc1.html", fragment="h_doc1_html_doc1_header1")}}
""",
            ),
        ],
    )
    # doc1 links to doc2's header.
    proxy1 = context.ContentProxy(ctx1, ctx1.content)
    soup = BeautifulSoup(proxy1.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert a["href"] == "doc2.html#h_doc2_html_doc2_header1"
    assert a.text == "doc2-header1"

    # and doc2 links back to doc1's header (loaded via proxy.load).
    proxy2 = proxy1.load("doc2.html")
    soup = BeautifulSoup(proxy2.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert a["href"] == "doc1.html#h_doc1_html_doc1_header1"
    assert a.text == "doc1-header1"
def test_link_recurse(siteroot: SiteRoot) -> None:
    """Two pages whose header targets link to each other must not loop
    forever: link_to reports a circular reference instead."""
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc1.html",
                """
<div class="header_target" id="doc1_id1">
<h1>doc1-header1
{{content.link_to("doc2.html", fragment="doc2_id1")}}
</h1>
</div>
""",
            ),
            (
                "doc2.html",
                """
<div class="header_target" id="doc2_id1">
<h1>doc2-header1
{{content.link_to("doc1.html", fragment="doc1_id1")}}
</h1>
</div>
""",
            ),
        ],
    )
    # Rendering either side surfaces the cycle in the link text.
    proxy1 = context.ContentProxy(ctx1, ctx1.content)
    soup = BeautifulSoup(proxy1.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert "Circular reference detected" in a.text

    proxy2 = context.ContentProxy(ctx1, ctx2.content)
    soup = BeautifulSoup(proxy2.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert "Circular reference detected" in a.text
def test_get_headers(siteroot: SiteRoot) -> None:
    """get_headers returns HTMLIDInfo for every <h*> with ids derived
    from the file path and the jinja-evaluated header text; explicit
    header_target divs don't appear in the list."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc.html",
            """
<h1>header1{{1+1}}</h1>
<div>body1</div>
<h2>header2{{2+2}}</h2>
<div>body2</div>
<div class="header_target" id="abcdefg"></div>
<h2>header3{{3+3}}</h2>
<div>body3</div>
""",
        )],
    )
    headers = ctx.content.get_headers(ctx)
    # Jinja expressions are evaluated before id/text are computed
    # (e.g. "header1{{1+1}}" -> "header12").
    assert headers == [
        context.HTMLIDInfo(id="h_doc_html_header12", tag="h1", text="header12"),
        context.HTMLIDInfo(id="h_doc_html_header24", tag="h2", text="header24"),
        context.HTMLIDInfo(id="h_doc_html_header36", tag="h2", text="header36"),
    ]
def test_load(siteroot: SiteRoot) -> None:
    """reST load: :jinja: roles outside a literal block are evaluated,
    while code-block contents are kept verbatim and highlighted."""
    # NOTE(review): reST directive-option indentation was reconstructed
    # from a whitespace-mangled source — confirm against the original.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
:jinja:`{}`

.. code-block:: html
   :caption: caption

   :jinja:`{{}}`
""",
        )],
    )
    assert ctx.content.body
    # The bare :jinja:`{}` becomes a paragraph; the code-block keeps
    # its caption and the literal (unevaluated) jinja text.
    assert (ctx.content.body.strip() == b"""<p>{}</p>
<div class="code-block">
<div class="code-block-caption">caption</div>
<div class="highlight"><pre><span></span>:jinja:`{{}}`
</pre></div>
</div>""")
def test_short_header_id(siteroot: SiteRoot) -> None:
    """With short_header_id enabled in front matter, header ids become
    short hash strings instead of path-derived names."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc.html",
            """---
short_header_id: true
---
<h1>header1{{1+1}}</h1>
<div>body1</div>
<h2>header2{{2+2}}</h2>
<div>body2</div>
<div class="header_target" id="abcdefg"></div>
<h2>header3{{3+3}}</h2>
<div>body3</div>
""",
        )],
    )
    headers = ctx.content.get_headers(ctx)
    # Short ids are deterministic hashes of the header content.
    assert headers == [
        context.HTMLIDInfo(id="2rjo6O", tag="h1", text="header12"),
        context.HTMLIDInfo(id="1iF0qA", tag="h2", text="header24"),
        context.HTMLIDInfo(id="3dYoq9", tag="h2", text="header36"),
    ]
def test_jinja_str_syntaxerr(siteroot: SiteRoot) -> None:
    """A jinja syntax error reports the offending line number and a
    '>>>'-marked snippet in JinjaEvalError.errors.

    Uses pytest.raises for consistency with the other error tests
    instead of the try/except/else-assert-False idiom.
    """
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "abc/index.rst",
            "",
        )],
    )
    # '{{ @ }}' on line 4 is invalid jinja syntax.
    src = """1
2
3
{{ @ }}
4
5
6
"""
    with pytest.raises(exceptions.JinjaEvalError) as excinfo:
        context.eval_jinja(ctx, ctx.content, "propname", src, {})

    e = excinfo.value
    # errors[0] is (name, lineno, annotated source).
    assert e.errors[0][1] == 4
    assert ">>> {{ @ }}" in e.errors[0][2]
def test_props_date_fromfilename(siteroot: SiteRoot) -> None:
    """The date property is parsed from date-like filenames; malformed
    names yield None.

    Renamed from ``test_props_date_fromfilenane`` (typo). Test names are
    discovered by pytest's ``test_`` prefix, so no caller is affected.
    """
    # Plain YYYYMMDD stem.
    (ctx,) = create_contexts(
        siteroot, srcs=[("20200101.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 00:00:00+00:00"

    # ISO-ish stem with a time component.
    (ctx,) = create_contexts(
        siteroot, srcs=[("2020-01-01T0203.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 02:03:00+00:00"

    # Garbage after the year: not a date, so no date is derived.
    (ctx,) = create_contexts(
        siteroot, srcs=[("2020-::::::::.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.date is None
def test_props_date(siteroot: SiteRoot) -> None:
    """date is None without metadata and is parsed (timezone included)
    from a ``date:`` metadata line."""
    # No date metadata -> None.
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", "hi")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.date is None

    # Explicit date with offset is preserved.
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
date: 2020-01-01 00:00:00+09:00
""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 00:00:00+09:00"
def test_build(siteroot: SiteRoot) -> None:
    """ctx.build() writes the page into the site output directory and
    returns the built output object(s)."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc.html",
            "{{page.title}}",
        )],
    )
    (path, ) = ctx.build()
    # Output lands next to outputdir with the source's filename.
    assert path.filename == ctx.site.outputdir / "doc.html"
def test_xref(siteroot: SiteRoot) -> None:
    """The reST ``target`` directive produces a header_target div with
    the given anchor id."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
.. target:: anchor-name
""",
        )],
    )
    assert ctx.content.body == b"""<div class="header_target" id="anchor-name"></div>"""
def test_filename(siteroot: SiteRoot) -> None:
    """Output filename derives from the source stem with the output
    extension, unless overridden by filename / stem+ext metadata."""
    # Default: .md source builds to <stem>.html.
    (ctx,) = create_contexts(siteroot, srcs=[("doc.md", "")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "doc.html"
    assert proxy.stem == "doc"
    assert proxy.ext == ".html"

    # filename metadata overrides everything.
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.md",
                """
filename: abc.def
#""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "abc.def"

    # stem and ext metadata are combined.
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.md",
                """
stem: 111
ext: .222
#""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "111.222"
def test_title_rst(siteroot: SiteRoot) -> None:
    """The reST document title (including an embedded URL) is captured
    into src.metadata["title"].

    Renamed from ``test_title``: this module already defines a
    ``test_title`` earlier, so the duplicate definition shadowed it and
    pytest silently skipped the first one (flake8 F811).
    """
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
title1 http://example.com
-----------------------------

abc
""",
        )],
    )
    assert ctx.content.src.metadata["title"] == "title1 http://example.com"
def test_load2(siteroot: SiteRoot) -> None:
    """A reST comment containing jinja text passes through as an HTML
    comment without jinja evaluation; '--' inside is escaped to '- -'
    so the comment terminator isn't broken."""
    # NOTE(review): the comment-block indentation was reconstructed from
    # a whitespace-mangled source — confirm against the original file.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
..
   {{ page.site_title }}

   --
""",
        )],
    )
    assert ctx.content.body
    assert (ctx.content.body.strip() == b"<!-- {{ page.site_title }} - - -->")
def test_tags(siteroot: SiteRoot) -> None:
    """A comma-separated ``tags:`` metadata line is exposed as a list
    of tag strings on the proxy."""
    source = """
tags: tag1, tag2
"""
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", source)])
    page = context.ContentProxy(ctx, ctx.content)
    assert page.tags == ["tag1", "tag2"]
def test_props(siteroot: SiteRoot) -> None:
    """Default values of the standard content properties on a bare
    HTML page with no metadata."""
    (ctx,) = create_contexts(siteroot, srcs=[("docfile.html", "hi")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.abstract_length == 256
    assert proxy.article_template == "page_article.html"
    assert not proxy.category
    assert proxy.canonical_url is None
    assert proxy.charset == "utf-8"
    assert proxy.draft is False
    assert proxy.html == "hi"
    assert proxy.lang == "en-US"
    assert proxy.order == 0
    assert proxy.site_title == "(FIXME-site_title)"
    assert proxy.site_url == "http://localhost:8888/"
    # Default timezone comes from the host machine.
    assert proxy.timezone == tzlocal.get_localzone().zone
    # Title falls back to the filename stem.
    assert proxy.title == "docfile"
def test_get_abstract(siteroot: SiteRoot) -> None:
    """build_abstract truncates nested HTML to the requested number of
    visible (non-tag, non-whitespace) characters, capped at the full
    text length."""
    body = "<div>123<div>456<div>789<div>abc</div>def</div>ghi</div>jkl</div>"
    # Visible length of the whole body (tags and whitespace stripped).
    maxlen = len("".join(re.sub(r"<[^>]*>", "", body).split()))
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", body)])

    def to_plain(s: str) -> str:
        # Strip tags and whitespace from an abstract.
        # Fixed: previously read the closure variable `abstract`
        # instead of the parameter `s`, working only by accident of
        # call order.
        return "".join(re.sub(r"<[^>]*>", "", s).split())

    # Default abstract covers the whole (short) body.
    abstract = ctx.content.build_abstract(ctx)
    txt = to_plain(abstract)
    assert len(txt) == maxlen

    # Every requested length i yields exactly min(i, maxlen) chars.
    for i in range(1, maxlen + 1):
        abstract = ctx.content.build_abstract(ctx, i)
        txt = to_plain(abstract)
        assert len(txt) == min(i, maxlen)
def test_pygments(siteroot: SiteRoot) -> None:
    """A code-block with :linenos: still keeps its literal contents
    (the :jinja: text is not evaluated inside the block)."""
    # NOTE(review): reST directive-option indentation was reconstructed
    # from a whitespace-mangled source — confirm against the original.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
.. code-block:: html
   :caption: caption
   :linenos:

   :jinja:`{{}}`
""",
        )],
    )
    assert ctx.content.body
    assert b":jinja:`{{}}`" in ctx.content.body
def test_articledirective(siteroot: SiteRoot) -> None:
    """The ``article`` directive's options become source metadata,
    without HTML-escaping the values."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
.. article::
   :date: 2017-01-01
   :title: title<>

test
""",
        )],
    )
    metadata = ctx.content.src.metadata
    assert metadata["date"] == "2017-01-01"
    # '<>' is kept verbatim in the metadata value.
    assert metadata["title"] == "title<>"
def test_jinjadirective(siteroot: SiteRoot) -> None:
    """The ``jinja`` directive emits its contents raw (no HTML
    escaping), while a :jinja: role outside it renders as a normal
    paragraph."""
    # NOTE(review): line breaks inside the source/expected strings were
    # reconstructed from a whitespace-mangled source — confirm.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
.. jinja::
   {{<a><b>}}
   <a><b>

:jinja:`{{abc}}`
""",
        )],
    )
    assert ctx.content.body == (b"""{{<a><b>}}
<a><b><p>{{abc}}</p>
""")
def test_header_target(siteroot: SiteRoot) -> None:
    """link(fragment=...) to a header_target div uses the enclosed
    header's (jinja-evaluated) text as the link text."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc.html",
            """
<div class="header_target" id="xyz">
<h1>header_xyz{{1+1}}</h1>
</div>
<div>body1</div>
""",
        )],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    link = proxy.link(fragment="xyz")
    soup = BeautifulSoup(link, "html.parser")
    # "{{1+1}}" is evaluated, so the text ends in "2".
    assert soup.a.text == "header_xyz2"
    assert soup.a["href"] == "doc.html#xyz"
def test_ga(siteroot: SiteRoot) -> None:
    """macros.google_analytics() embeds the configured ga_tracking_id
    into the built output."""
    # NOTE(review): reST/jinja indentation reconstructed from a
    # whitespace-mangled source — confirm against the original.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "index.rst",
            """
title
----------------

.. jinja::
   {{ macros.google_analytics() }}
""",
        )],
    )
    # Tracking id configured at the site root.
    ctx.site.config.add("/", {"ga_tracking_id": "12345"})
    (output, ) = ctx.build()
    assert "id=12345" in output.filename.read_text()
def test_subtitle(siteroot: SiteRoot) -> None:
    """The document title (first reST section) is lifted out of the
    body; the next section heading becomes the body's <h1>."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc1.rst",
            """
title1
--------------

title2
===========

abc
""",
        )],
    )
    text = str(ctx.content.body)
    # title1 is the document title, so it must not stay in the body.
    assert "title1" not in text
    assert "<h1>title2</h1>" in text
def test_contentsproxy(siteroot: SiteRoot) -> None:
    """ContentsProxy: lookup by absolute/relative path or subscript,
    filtering by props and subdirs (with/without recursion), and
    grouping by tag."""
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/c/doc1.html",
                """
tags: tag1
""",
            ),
            (
                "a/b/doc2.html",
                """
tags: tag2
""",
            ),
        ],
    )
    proxy = context.ContentsProxy(ctx1, ctx1.content)

    # Same content is reachable by absolute path, relative path,
    # and subscript syntax.
    assert ctx2.content is proxy.get_content("/a/b/doc2.html").content
    assert ctx2.content is proxy.get_content("../doc2.html").content
    assert ctx2.content is proxy["../doc2.html"].content

    # Filtering by tag.
    assert [ctx1.content] == [
        c.content for c in proxy.get_contents(filters={"tags": "tag1"})
    ]
    # Restricting to a subdirectory (recursive by default).
    assert [ctx1.content
            ] == [c.content for c in proxy.get_contents(subdirs=["/a/b/c"])]
    assert {ctx1.content, ctx2.content
            } == {c.content for c in proxy.get_contents(subdirs=["/a/b"])}
    # recurse=False excludes contents in nested subdirectories.
    assert {ctx2.content} == {
        c.content
        for c in proxy.get_contents(subdirs=["/a/b"], recurse=False)
    }

    # group_items yields (tag-tuple, contents) pairs.
    (tags1, files1), (tags2, files2) = sorted(proxy.group_items(group="tags"))
    assert tags1 == ("tag1", )
    assert [ctx1.content] == [c.content for c in files1]
    assert tags2 == ("tag2", )
    assert [ctx2.content] == [c.content for c in files2]
def test_get_plain_abstract(siteroot: SiteRoot) -> None:
    """build_abstract(plain=True) strips markup and truncates to the
    requested number of visible characters, capped at the full text
    length."""
    body = """
<div>
1
23<div>4
56<div>789<div>abc</div>def</div>ghi</div>jkl</div>
"""
    # Visible length of the body once tags and whitespace are removed.
    expected_len = len("".join(re.sub(r"<[^>]*>", "", body).split()))
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", body)])

    # Unlimited abstract covers the whole body.
    full = ctx.content.build_abstract(ctx, plain=True)
    assert len("".join(full.split())) == expected_len

    # Each limit yields exactly min(limit, expected_len) characters.
    for limit in range(1, expected_len + 1):
        partial = ctx.content.build_abstract(ctx, limit, plain=True)
        assert len("".join(partial.split())) == min(limit, expected_len)
def test_search_header(siteroot: SiteRoot) -> None:
    """link_to(search=...) finds a header by (pre-evaluation) text and
    uses the evaluated header text as the link label."""
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "doc.html",
            """
<h1>header1{{1+1}}</h1>
<div>body1</div>
<h2>header2{{2+2}}</h2>
<div>body2</div>

{{ page.link_to(page, search="header2") }}
""",
        )],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    soup = BeautifulSoup(proxy.html, "html.parser")
    # The matched <h2>'s evaluated text ("header2"+"4") is the label.
    assert soup.find_all("a")[-1].text == "header24"
def test_configproxy(siteroot: SiteRoot) -> None:
    """ConfigProxy: values come from the nearest directory's config
    yaml, walking up the tree; unknown keys raise ConfigNotFound."""
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/c/doc1.html",
                """
""",
            ),
            (
                "a/b/c/doc2.yaml",
                """
type: config
prop1: value1
""",
            ),
            (
                "a/b/doc3.html",
                """
""",
            ),
            (
                "a/b/doc4.yaml",
                """
type: config
prop1: value2
prop2: value3
""",
            ),
        ],
    )
    proxy = context.ConfigProxy(ctx1, ctx1.content)
    # Nearest config (a/b/c) wins for prop1; prop2 is inherited
    # from the parent directory (a/b).
    assert "value1" == proxy["prop1"]
    assert "value3" == proxy["prop2"]

    # get() accepts an explicit directory: None/"." mean the content's
    # own directory, ".." its parent.
    assert "value1" == proxy.get(None, "prop1")
    assert "value1" == proxy.get(".", "prop1")
    assert "value2" == proxy.get("..", "prop1")

    # Missing keys raise instead of returning a default.
    with pytest.raises(exceptions.ConfigNotFound):
        assert proxy["prop3"]
def test_jinja_err_both(siteroot: SiteRoot) -> None:
    """A jinja syntax error inside a page surfaces two entries in
    JinjaEvalError.errors: one for the article template and one for the
    page's own html, with a '>>>'-marked offending line."""
    # NOTE(review): jinja-directive indentation reconstructed from a
    # whitespace-mangled source — confirm against the original.
    (ctx, ) = create_contexts(
        siteroot,
        srcs=[(
            "abc/index.rst",
            """
.. jinja::
   12345
   {{ = }}
   <a><b>
""",
        )],
    )
    with pytest.raises(exceptions.JinjaEvalError) as excinfo:
        ctx.build()

    e = excinfo.value
    assert len(e.errors) == 2
    # errors[i] is (name, lineno, annotated source).
    assert "page_article.html" in e.errors[0][0]
    assert "abc/index.rst#html" in e.errors[1][0]
    assert ">>> {{ = }}" in e.errors[1][2]