def test_link_recurse(siteroot: SiteRoot) -> None:
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc1.html",
                """
<div class="header_target" id="doc1_id1">
    <h1>doc1-header1
        {{content.link_to("doc2.html", fragment="doc2_id1")}}
    </h1>
</div>
""",
            ),
            (
                "doc2.html",
                """
<div class="header_target" id="doc2_id1">
    <h1>doc2-header1
        {{content.link_to("doc1.html", fragment="doc1_id1")}}
    </h1>
</div>
""",
            ),
        ],
    )

    # Each document links into a header of the other, so building the link
    # text must detect the cycle instead of recursing forever.
    proxy1 = context.ContentProxy(ctx1, ctx1.content)
    soup = BeautifulSoup(proxy1.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert "Circular reference detected" in a.text

    proxy2 = context.ContentProxy(ctx1, ctx2.content)
    soup = BeautifulSoup(proxy2.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert "Circular reference detected" in a.text


def test_link_xref(siteroot: SiteRoot) -> None:
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc1.html",
                """
<h1>doc1-header1</h1>
{{page.link_to("doc2.html", fragment="h_doc2_html_doc2_header1")}}
""",
            ),
            (
                "doc2.html",
                """<h1>doc2-header1</h1>
{{page.link_to("doc1.html", fragment="h_doc1_html_doc1_header1")}}
""",
            ),
        ],
    )

    proxy1 = context.ContentProxy(ctx1, ctx1.content)
    soup = BeautifulSoup(proxy1.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert a["href"] == "doc2.html#h_doc2_html_doc2_header1"
    assert a.text == "doc2-header1"

    proxy2 = proxy1.load("doc2.html")
    soup = BeautifulSoup(proxy2.html, "html.parser")
    a = soup.find_all("a")[-1]
    assert a["href"] == "doc1.html#h_doc1_html_doc1_header1"
    assert a.text == "doc1-header1"


def test_headers(siteroot: SiteRoot) -> None:
    src1 = """
{% for id, tag, text in page.headers %}
    [{{id}}, {{tag}}, {{text}}]
{% endfor %}

{% for id, tag, text in page.header_anchors %}
    [{{id}}, {{tag}}, {{text}}]
{% endfor %}

<h1>text</h1>
<h1>text</h1>
"""
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", src1)])
    proxy = context.ContentProxy(ctx, ctx.content)

    html = proxy.html
    assert "[h_doc_html_text, h1, text]" in html
    assert "[h_doc_html_text_1, h1, text]" in html

    soup = BeautifulSoup(html, "html.parser")
    assert soup.select("#h_doc_html_text")[0].text == "text"
    assert soup.select("#h_doc_html_text_1")[0].text == "text"


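# A minimal sketch, assuming the id scheme implied by the assertions above
# (not the library's actual code): anchor ids look like "h_" plus the source
# path and header text with non-alphanumerics mapped to "_", and repeated
# headers get "_1", "_2", ... suffixes. `_sketch_header_id` is hypothetical.
def _sketch_header_id(path: str, text: str, seen: dict) -> str:
    import re

    base = "h_" + re.sub(r"[^a-zA-Z0-9]", "_", f"{path}_{text}")
    count = seen.get(base, 0)
    seen[base] = count + 1
    return base if count == 0 else f"{base}_{count}"


# With seen={}, _sketch_header_id("doc.html", "text", seen) -> "h_doc_html_text";
# calling it again for the second identical <h1> -> "h_doc_html_text_1".

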
def test_path_to(siteroot: SiteRoot) -> None:
    ctx1, ctx2, ctx3 = create_contexts(
        siteroot,
        srcs=[
            ("a/b/c/doc1.html", ""),
            ("a/b/c/doc2.html", ""),
            ("a/b/d/doc3.html", ""),
        ],
    )

    proxy = context.ContentProxy(ctx1, ctx1.content)

    path = proxy.path_to("/a/b/c/doc2.html")
    assert path == "doc2.html"

    path = proxy.path_to("/a/b/d/doc3.html")
    assert path == "../d/doc3.html"

    path = proxy.path_to("../d/doc3.html", fragment="fragment1")
    assert path == "../d/doc3.html#fragment1"

    path = proxy.path_to("../d/doc3.html", abs_path=True)
    assert path == "http://localhost:8888/a/b/d/doc3.html"

    ctx1.content.use_abs_path = True
    path = proxy.path_to("../d/doc3.html")
    assert path == "http://localhost:8888/a/b/d/doc3.html"


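# A rough stdlib illustration of the relative-path behavior asserted above
# (an assumption, not the library's implementation): relativize the target
# against the referencing page's directory.
def _sketch_path_to(src: str, target: str) -> str:
    import posixpath

    return posixpath.relpath(target, posixpath.dirname(src))


# _sketch_path_to("/a/b/c/doc1.html", "/a/b/c/doc2.html") -> "doc2.html"
# _sketch_path_to("/a/b/c/doc1.html", "/a/b/d/doc3.html") -> "../d/doc3.html"

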
def test_props_date_fromfilename(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot, srcs=[("20200101.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 00:00:00+00:00"

    (ctx,) = create_contexts(
        siteroot, srcs=[("2020-01-01T0203.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 02:03:00+00:00"

    (ctx,) = create_contexts(
        siteroot, srcs=[("2020-::::::::.html", "hi")], config={"timezone": "UTC"}
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.date is None


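# A hedged sketch of deriving a date from a filename stem (an assumption
# about the behavior, not the actual parser): try a few timestamp formats
# and fall back to None when nothing matches, as the last case expects.
def _sketch_date_from_stem(stem: str):
    from datetime import datetime, timezone

    for fmt in ("%Y%m%d", "%Y-%m-%dT%H%M"):
        try:
            return datetime.strptime(stem, fmt).replace(tzinfo=timezone.utc)
        except ValueError:
            pass
    return None


# _sketch_date_from_stem("20200101")       -> 2020-01-01 00:00:00+00:00
# _sketch_date_from_stem("2020-::::::::")  -> None

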
def test_props_date(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(siteroot, srcs=[("doc.html", "hi")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.date is None

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
date: 2020-01-01 00:00:00+09:00

""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert str(proxy.date) == "2020-01-01 00:00:00+09:00"


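# Cross-check of the asserted string using only the stdlib; a sketch of the
# expected parse, not the library's metadata handling.
def _sketch_parse_meta_date(value: str) -> str:
    from datetime import datetime

    return str(datetime.fromisoformat(value))


# _sketch_parse_meta_date("2020-01-01 00:00:00+09:00")
#   -> "2020-01-01 00:00:00+09:00"

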
def test_load(siteroot: SiteRoot) -> None:
    siteroot.write_text(siteroot.contents / "A/B/C/file1.html", "A/B/C/file1.html")
    siteroot.write_text(siteroot.contents / "A/B/D/file2.html", "A/B/D/file2.html")

    site = siteroot.load({}, {})
    jinjaenv = site.build_jinjaenv()
    ctx = context.JinjaOutput(site, jinjaenv, (("A", "B", "C"), "file1.html"))
    proxy = context.ContentProxy(ctx, ctx.content)

    file2 = proxy.load("../D/file2.html")
    assert file2.contentpath == (("A", "B", "D"), "file2.html")


def test_filename(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(siteroot, srcs=[("doc.md", "")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "doc.html"
    assert proxy.stem == "doc"
    assert proxy.ext == ".html"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.md",
                """
filename: abc.def
#""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "abc.def"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.md",
                """
stem: 111
ext: .222
#""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.filename == "111.222"


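# A sketch of the precedence the three cases above imply (an assumption, not
# the library's code): an explicit `filename` wins outright; otherwise the
# name is assembled from `stem` + `ext`, which default to the source stem
# and the rendered extension (".html" for Markdown sources).
def _sketch_filename(meta: dict, src_stem: str, default_ext: str = ".html") -> str:
    if meta.get("filename"):
        return meta["filename"]
    return str(meta.get("stem", src_stem)) + str(meta.get("ext", default_ext))


# _sketch_filename({}, "doc") -> "doc.html"
# _sketch_filename({"filename": "abc.def"}, "doc") -> "abc.def"
# _sketch_filename({"stem": 111, "ext": ".222"}, "doc") -> "111.222"

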
def test_tags(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
tags: tag1, tag2

""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.tags == ["tag1", "tag2"]


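# The "tags: tag1, tag2" metadata line evidently becomes a list; a minimal
# sketch of that comma-split (an assumption, not the actual parser):
def _sketch_parse_tags(value: str) -> list:
    return [tag.strip() for tag in value.split(",") if tag.strip()]


# _sketch_parse_tags("tag1, tag2") -> ["tag1", "tag2"]

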
def test_props(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(siteroot, srcs=[("docfile.html", "hi")])
    proxy = context.ContentProxy(ctx, ctx.content)

    assert proxy.abstract_length == 256
    assert proxy.article_template == "page_article.html"
    assert not proxy.category
    assert proxy.canonical_url is None
    assert proxy.charset == "utf-8"
    assert proxy.draft is False
    assert proxy.html == "hi"
    assert proxy.lang == "en-US"
    assert proxy.order == 0
    assert proxy.site_title == "(FIXME-site_title)"
    assert proxy.site_url == "http://localhost:8888/"
    assert proxy.timezone == tzlocal.get_localzone().zone
    assert proxy.title == "docfile"


def test_header_target(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
<div class="header_target" id="xyz">
    <h1>header_xyz{{1+1}}</h1>
</div>

<div>body1</div>
""",
            )
        ],
    )

    proxy = context.ContentProxy(ctx, ctx.content)
    link = proxy.link(fragment="xyz")
    soup = BeautifulSoup(link, "html.parser")

    assert soup.a.text == "header_xyz2"
    assert soup.a["href"] == "doc.html#xyz"


def test_search_header(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
<h1>header1{{1+1}}</h1>
<div>body1</div>

<h2>header2{{2+2}}</h2>
<div>body2</div>

{{ page.link_to(page, search="header2") }}
""",
            )
        ],
    )

    proxy = context.ContentProxy(ctx, ctx.content)
    soup = BeautifulSoup(proxy.html, "html.parser")
    assert soup.find_all("a")[-1].text == "header24"


def test_imports(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
imports: macro1.html, macro2.html

<h1>header1-{{macro1.macro1("param")}}</h1>
<h2>header2-{{macro2.macro2()}}</h2>
""",
            )
        ],
    )

    (siteroot.templates / "macro1.html").write_text(
        """
{% macro macro1(msg) -%}
   param: {{msg}}
{%- endmacro %}
"""
    )

    (siteroot.templates / "macro2.html").write_text(
        """
{% macro macro2() -%}
   macro2.macro2
{%- endmacro %}
"""
    )

    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.imports == ["macro1.html", "macro2.html"]
    assert ">header1-param: param</h1>" in proxy.html
    assert ">header2-macro2.macro2</h2>" in proxy.html


def test_link(siteroot: SiteRoot) -> None:
    (ctx1, ctx2) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc1.html",
                "",
            ),
            (
                "doc2.html",
                """
<h1>he<span>a</span>der1</h1>
<div>body1</div>

<h2>header2</h2>
<div>body2</div>
""",
            ),
        ],
    )

    proxy1 = context.ContentProxy(ctx1, ctx1.content)
    proxy2 = context.ContentProxy(ctx1, ctx2.content)

    link = proxy1.link_to("doc2.html")
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a["href"] == "doc2.html"
    assert str(soup.a.text) == "doc2"

    link = proxy2.link()
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a["href"] == "doc2.html"
    assert soup.a.text == "doc2"

    link = proxy2.link(text="<>text<>")
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a.text == "<>text<>"

    link = proxy2.link(fragment="h_doc2_html_header1")
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a["href"] == "doc2.html#h_doc2_html_header1"
    assert soup.a.text == "header1"

    link = proxy2.link(fragment="h_doc2_html_header1", text="text")
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a.text == "text"

    link = proxy2.link(abs_path=True)
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a["href"] == "http://localhost:8888/doc2.html"

    link = proxy2.link(fragment="h_doc2_html_header2")
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a.text == "header2"

    link = proxy2.link(attrs={"class": "classname", "style": "border:solid"})
    soup = BeautifulSoup(link, "html.parser")
    assert soup.a.text == "doc2"
    # BeautifulSoup reports `class` as a multi-valued attribute (a list).
    assert soup.a["class"] == ["classname"]
    assert soup.a["style"] == "border:solid"


def test_url(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "doc.html",
                """
stem: aaaaa

hi""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.url == "http://localhost:8888/aaaaa.html"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/doc.html",
                """
filename: ../abc.html

hi""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.url == "http://localhost:8888/a/abc.html"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/doc.html",
                """
canonical_url: http://example.com/aaa.html

hi""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.url == "http://example.com/aaa.html"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/doc.html",
                """
canonical_url: ../abc.html

hi""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.url == "http://localhost:8888/a/abc.html"

    (ctx,) = create_contexts(
        siteroot,
        srcs=[
            (
                "a/b/doc.html",
                """
canonical_url: abc.html

hi""",
            )
        ],
    )
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.url == "http://localhost:8888/a/b/abc.html"


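# A sketch of the URL resolution these cases exercise, assuming it behaves
# like stdlib urljoin against the page's would-be URL: relative values are
# resolved from the page's directory, absolute canonical URLs pass through.
def _sketch_resolve_url(page_url: str, canonical: str) -> str:
    from urllib.parse import urljoin

    return urljoin(page_url, canonical)


# _sketch_resolve_url("http://localhost:8888/a/b/doc.html", "../abc.html")
#   -> "http://localhost:8888/a/abc.html"
# _sketch_resolve_url("http://localhost:8888/a/b/doc.html",
#                     "http://example.com/aaa.html")
#   -> "http://example.com/aaa.html"

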
def test_parent_dirs(siteroot: SiteRoot) -> None:
    (ctx,) = create_contexts(siteroot, srcs=[("a/b/c/doc.html", "")])
    proxy = context.ContentProxy(ctx, ctx.content)
    assert proxy.parents_dirs == [(), ("a",), ("a", "b"), ("a", "b", "c")]


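# A one-line sketch (assumption, not the library's code) of accumulating the
# parent-directory tuples asserted above from a content path:
def _sketch_parents_dirs(dirs: tuple) -> list:
    return [dirs[:i] for i in range(len(dirs) + 1)]


# _sketch_parents_dirs(("a", "b", "c"))
#   -> [(), ("a",), ("a", "b"), ("a", "b", "c")]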