def render_internal_link(self, token: SyntaxTreeNode) -> None:
    """Render link token `[text](link "title")`,
    where the link has not been identified as an external URL.
    """
    destination = unquote(cast(str, token.attrGet("href") or ""))

    # make the path relative to an "including" document
    # this is set when using the `relative-docs` option of the MyST `include` directive
    relative_include = self.md_env.get("relative-docs", None)
    if relative_include is not None and destination.startswith(relative_include[0]):
        source_dir, include_dir = relative_include[1:]
        destination = os.path.relpath(
            os.path.join(include_dir, os.path.normpath(destination)), source_dir
        )

    potential_path = (
        Path(self.doc_env.doc2path(self.doc_env.docname)).parent / destination
        if self.doc_env.srcdir  # not set in some test situations
        else None
    )
    if (
        potential_path
        and potential_path.is_file()
        and not any(
            destination.endswith(suffix)
            for suffix in self.doc_env.config.source_suffix
        )
    ):
        wrap_node = addnodes.download_reference(
            refdoc=self.doc_env.docname,
            reftarget=destination,
            reftype="myst",
            refdomain=None,  # Added to enable cross-linking
            refexplicit=len(token.children or []) > 0,
            refwarn=False,
        )
        classes = ["xref", "download", "myst"]
        text = destination if not token.children else ""
    else:
        wrap_node = addnodes.pending_xref(
            refdoc=self.doc_env.docname,
            reftarget=destination,
            reftype="myst",
            refdomain=None,  # Added to enable cross-linking
            refexplicit=len(token.children or []) > 0,
            refwarn=True,
        )
        classes = ["xref", "myst"]
        text = ""

    self.add_line_and_source_path(wrap_node, token)
    title = token.attrGet("title")
    if title:
        wrap_node["title"] = title
    self.current_node.append(wrap_node)

    inner_node = nodes.inline("", text, classes=classes)
    wrap_node.append(inner_node)
    with self.current_node_context(inner_node):
        self.render_children(token)
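# Illustrative note (not part of the original source): with the branches above,
# a link whose target exists on disk and is not a source file
# (e.g. ``[data](files/data.csv)``) becomes a ``download_reference``, while a
# link to another source document (e.g. ``[other](other.md)``), or to a target
# that cannot be found, falls through to a ``pending_xref`` that is resolved at
# a later Sphinx stage.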
def test_pretty(file_regression):
    md = MarkdownIt("commonmark")
    tokens = md.parse(
        """
# Header

Here's some text and an image ![title](image.png)

1. a **list**

> a *quote*
"""
    )
    node = SyntaxTreeNode(tokens)
    file_regression.check(node.pretty(indent=2, show_text=True), extension=".xml")
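# NOTE: several tests below reference a module-level ``EXAMPLE_MARKDOWN``
# constant that is not included in this excerpt. The definition here is a
# hypothetical minimal stand-in, chosen only to be consistent with the node
# types asserted in ``test_walk`` and ``test_sibling_traverse``.
EXAMPLE_MARKDOWN = """\
# Heading

Some text with **bold text** and a tail
"""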
def test_walk():
    tokens = MarkdownIt().parse(EXAMPLE_MARKDOWN)
    tree = SyntaxTreeNode(tokens)
    expected_node_types = (
        "root",
        "heading",
        "inline",
        "text",
        "paragraph",
        "inline",
        "text",
        "strong",
        "text",
        "text",
    )
    for node, expected_type in zip(tree.walk(), expected_node_types):
        assert node.type == expected_type
def test_type():
    tokens = MarkdownIt().parse(EXAMPLE_MARKDOWN)
    tree = SyntaxTreeNode(tokens)

    # Root type is "root"
    assert tree.type == "root"

    # "_open" suffix must be stripped from nested token type
    assert tree.children[0].type == "heading"
    assert tree[0].type == "heading"

    # For unnested tokens, node type must remain same as token type
    assert tree.children[0].children[0].type == "inline"
def render_colon_fence(self, token: SyntaxTreeNode) -> None:
    """Render a code fence with ``:`` colon delimiters."""
    if token.content.startswith(":::"):
        # the content starts with a nested fence block,
        # but must distinguish between ``:options:``, so we add a new line
        # TODO: shouldn't access private attribute
        token._attribute_token().content = "\n" + token.content
    return self.render_fence(token)
def render_colon_fence(self, token: SyntaxTreeNode) -> None:
    """Render a code fence with ``:`` colon delimiters."""
    if token.content.startswith(":::"):
        # the content starts with a nested fence block,
        # but must distinguish between ``:options:``, so we add a new line
        assert token.token is not None, '"colon_fence" must have a `token`'
        linear_token = token.token.copy()
        linear_token.content = "\n" + linear_token.content
        token.token = linear_token
    return self.render_fence(token)
def _render_tokens(self, tokens: List[Token]) -> None:
    """Render the tokens."""
    # propagate line number down to inline elements
    for token in tokens:
        if not token.map:
            continue
        # For docutils we want 1 based line numbers (not 0)
        token.map = [token.map[0] + 1, token.map[1] + 1]
        for token_child in token.children or []:
            token_child.map = token.map

    # nest tokens
    node_tree = SyntaxTreeNode(tokens)

    # move footnote definitions to env
    self.md_env.setdefault("foot_refs", {})
    for node in node_tree.walk(include_self=True):
        new_children = []
        for child in node.children:
            if child.type == "footnote_reference":
                label = child.meta["label"]
                self.md_env["foot_refs"].setdefault(label, []).append(child)
            else:
                new_children.append(child)
        node.children = new_children

    # render
    for child in node_tree.children:
        # skip hidden?
        if f"render_{child.type}" in self.rules:
            self.rules[f"render_{child.type}"](child)
        else:
            self.create_warning(
                f"No render method for: {child.type}",
                line=token_line(child, default=0),
                subtype="render",
                append_to=self.current_node,
            )
def render_image(self, token: SyntaxTreeNode) -> None:
    img_node = nodes.image()
    self.add_line_and_source_path(img_node, token)
    destination = cast(str, token.attrGet("src") or "")

    if self.config.get("relative-images", None) is not None and not is_external_url(
        destination, None, True
    ):
        # make the path relative to an "including" document
        destination = os.path.normpath(
            os.path.join(
                self.config.get("relative-images", ""),
                os.path.normpath(destination),
            )
        )

    img_node["uri"] = destination
    img_node["alt"] = self.renderInlineAsText(token.children or [])
    title = token.attrGet("title")
    if title:
        img_node["title"] = token.attrGet("title")

    self.current_node.append(img_node)
def test_sibling_traverse():
    tokens = MarkdownIt().parse(EXAMPLE_MARKDOWN)
    tree = SyntaxTreeNode(tokens)
    paragraph_inline_node = tree.children[1].children[0]
    text_node = paragraph_inline_node.children[0]
    assert text_node.type == "text"
    strong_node = text_node.next_sibling
    assert strong_node.type == "strong"
    another_text_node = strong_node.next_sibling
    assert another_text_node.type == "text"
    assert another_text_node.next_sibling is None
    assert another_text_node.previous_sibling.previous_sibling == text_node
    assert text_node.previous_sibling is None
def render_link(self, token: SyntaxTreeNode) -> None:
    if token.markup == "autolink":
        return self.render_autolink(token)

    ref_node = nodes.reference()
    self.add_line_and_source_path(ref_node, token)
    destination = cast(str, token.attrGet("href") or "")

    if self.config.get("relative-docs", None) is not None and destination.startswith(
        self.config["relative-docs"][0]
    ):
        # make the path relative to an "including" document
        source_dir, include_dir = self.config["relative-docs"][1:]
        destination = os.path.relpath(
            os.path.join(include_dir, os.path.normpath(destination)), source_dir
        )

    ref_node["refuri"] = destination  # type: ignore[index]
    title = token.attrGet("title")
    if title:
        ref_node["title"] = title  # type: ignore[index]
    next_node = ref_node

    # TODO currently any reference with a fragment is deemed external
    # (if anchors are not enabled)
    # This comes from recommonmark, but I am not sure of the rationale for it
    if is_external_url(
        destination,
        self.config.get("myst_url_schemes", None),
        "heading_anchors" not in self.config.get("myst_extensions", []),
    ):
        self.current_node.append(next_node)
        with self.current_node_context(ref_node):
            self.render_children(token)
    else:
        self.handle_cross_reference(token, destination)
def test_property_passthrough():
    tokens = MarkdownIt().parse(EXAMPLE_MARKDOWN)
    heading_open = tokens[0]
    tree = SyntaxTreeNode(tokens)
    heading_node = tree.children[0]
    assert heading_open.tag == heading_node.tag
    assert tuple(heading_open.map) == heading_node.map
    assert heading_open.level == heading_node.level
    assert heading_open.content == heading_node.content
    assert heading_open.markup == heading_node.markup
    assert heading_open.info == heading_node.info
    assert heading_open.meta == heading_node.meta
    assert heading_open.block == heading_node.block
    assert heading_open.hidden == heading_node.hidden
def handle_cross_reference(self, token: SyntaxTreeNode, destination: str) -> None:
    """Create nodes for references that are not immediately resolvable."""
    wrap_node = addnodes.pending_xref(
        refdoc=self.doc_env.docname,
        reftarget=unquote(destination),
        reftype="myst",
        refdomain=None,  # Added to enable cross-linking
        refexplicit=len(token.children or []) > 0,
        refwarn=True,
    )
    self.add_line_and_source_path(wrap_node, token)
    title = token.attrGet("title")
    if title:
        wrap_node["title"] = title
    self.current_node.append(wrap_node)

    inner_node = nodes.inline("", "", classes=["xref", "myst"])
    wrap_node.append(inner_node)
    with self.current_node_context(inner_node):
        self.render_children(token)
def render_heading(self, token: SyntaxTreeNode) -> None:
    """This extends the docutils method, to allow for the addition of heading ids.

    These ids are computed by the ``markdown-it-py`` ``anchors_plugin``
    as "slugs" which are unique to a document.

    The approach is similar to ``sphinx.ext.autosectionlabel``
    """
    super().render_heading(token)

    if not isinstance(self.current_node, nodes.section):
        return

    # create the slug string
    slug = cast(str, token.attrGet("id"))
    if slug is None:
        return

    section = self.current_node
    doc_slug = (
        self.doc_env.doc2path(self.doc_env.docname, base=False) + "#" + slug
    )

    # save the reference in the standard domain, so that it can be handled properly
    domain = cast(StandardDomain, self.doc_env.get_domain("std"))
    if doc_slug in domain.labels:
        other_doc = self.doc_env.doc2path(domain.labels[doc_slug][0])
        self.create_warning(
            f"duplicate label {doc_slug}, other instance in {other_doc}",
            line=section.line,
            subtype="anchor",
        )
    labelid = section["ids"][0]
    domain.anonlabels[doc_slug] = self.doc_env.docname, labelid
    domain.labels[doc_slug] = (
        self.doc_env.docname,
        labelid,
        clean_astext(section[0]),
    )
    self.doc_env.metadata[self.doc_env.docname]["myst_anchors"] = True
    section["myst-anchor"] = doc_slug
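# ``render_heading`` above relies on the heading token carrying an ``id``
# attribute. A minimal sketch of how such a slug is produced, assuming the
# ``anchors_plugin`` from ``mdit_py_plugins`` (the exact plugin configuration
# used by the parser is not shown in this excerpt):
from markdown_it import MarkdownIt
from mdit_py_plugins.anchors import anchors_plugin

md_with_anchors = MarkdownIt().use(anchors_plugin, max_level=2)
anchor_tokens = md_with_anchors.parse("# My Heading")
# the plugin slugifies the heading text and stores it on the heading_open token
assert anchor_tokens[0].attrGet("id") == "my-heading"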
def render_autolink(self, token: SyntaxTreeNode) -> None:
    refuri = target = escapeHtml(token.attrGet("href") or "")  # type: ignore[arg-type]
    ref_node = nodes.reference(target, target, refuri=refuri)
    self.add_line_and_source_path(ref_node, token)
    self.current_node.append(ref_node)
def test_tree_to_tokens_conversion():
    tokens = MarkdownIt().parse(EXAMPLE_MARKDOWN)
    tokens_after_roundtrip = SyntaxTreeNode(tokens).to_tokens()
    assert tokens == tokens_after_roundtrip