def test_foot_definition(name, source, data_regression):
    """Regression-check footnote definitions collected during block parsing."""
    # Register the Footnote token ahead of LinkDefinition in the block-token list.
    get_parse_context().block_tokens.insert_before(
        block_tokens_ext.Footnote, block_tokens.LinkDefinition
    )
    serialized_tree = serialize_tokens(tokenize_main(source), as_dict=True)
    serialized_feet = serialize_tokens(
        get_parse_context().foot_definitions, as_dict=True
    )
    payload = {
        "tree": serialized_tree,
        "footnotes": serialized_feet,
        "link_definitions": get_parse_context().link_definitions,
    }
    data_regression.check(payload, basename=f"test_foot_definitions_{name}")
# Example #2
def test_fenced_code(name, source, data_regression):
    """Regression-check the token tree produced for fenced-code sources.

    NOTE(review): a leftover debug ``print(SourceLines(source).lines)`` (and
    the function-scope import that existed only to support it) was removed;
    it merely cluttered the test output and had no effect on the check.
    """
    data_regression.check(
        serialize_tokens(tokenize_main(source), as_dict=True),
        basename=f"test_fenced_code_{name}",
    )
def test_foot_ref_span(name, source, data_regression):
    """Regression-check span tokenization with FootReference enabled."""
    # Pre-seed a definition for key "a" so foot references can resolve.
    get_parse_context().foot_definitions["a"] = True
    get_parse_context().span_tokens.insert_after(FootReference, CoreTokens)
    serialized = serialize_tokens(tokenize_span(source), as_dict=True)
    data_regression.check(serialized, basename=f"test_foot_ref_span_{name}")
# Example #4
def test_link_definitions(name, source, data_regression):
    """Regression-check the token tree plus the collected link definitions."""
    serialized_tree = serialize_tokens(tokenize_main(source), as_dict=True)
    payload = {
        "tree": serialized_tree,
        "link_definitions": get_parse_context().link_definitions,
    }
    data_regression.check(payload, basename=f"test_link_definitions_{name}")
def test_resolution(name, source, data_regression):
    """Regression-check whole-document parsing with footnote tokens enabled."""
    # Enable FootReference in the span tokens and Footnote in the block tokens.
    get_parse_context().span_tokens.insert_after(FootReference, CoreTokens)
    get_parse_context().block_tokens.insert_before(
        block_tokens_ext.Footnote, block_tokens.LinkDefinition
    )
    document = block_tokens.Document.read(source)
    serialized = serialize_tokens(document, as_dict=True)
    data_regression.check(serialized, basename=f"test_resolution_{name}")
# Example #6
def test_block_code(name, source, data_regression):
    """Regression-check the parsed token tree for block-code sources."""
    tokens = tokenize_main(source)
    serialized = serialize_tokens(tokens, as_dict=True)
    data_regression.check(serialized, basename=f"test_block_code_{name}")
# Example #7
def test_setext_heading(name, source, data_regression):
    """Regression-check the parsed token tree for setext-heading sources."""
    tokens = tokenize_main(source)
    serialized = serialize_tokens(tokens, as_dict=True)
    data_regression.check(serialized, basename=f"test_setext_heading_{name}")
# Example #8
def test_table_cell(data_regression):
    """Regression-check the token produced for a single table cell."""
    cell_token = block_tokens_ext.TableCell.read("cell 2")
    serialized = serialize_tokens(cell_token, as_dict=True)
    data_regression.check(serialized)
# Example #9
def test_table_row(name, source, row_align, data_regression):
    """Regression-check table-row parsing under the given row alignment."""
    parsed_row = block_tokens_ext.TableRow.read(source, row_align=row_align)
    serialized = serialize_tokens(parsed_row, as_dict=True)
    data_regression.check(serialized, basename=f"test_table_row_{name}")
# Example #10
def test_doc_read_store_link_defs(name, source, data_regression):
    """Regression-check Document.read with no token types skipped."""
    document = block_tokens.Document.read(source, skip_tokens=())
    serialized = serialize_tokens(document, as_dict=True)
    data_regression.check(
        serialized, basename=f"test_doc_read_store_link_defs_{name}"
    )
# Example #11
def test_doc_read_with_front_matter(name, source, data_regression):
    """Regression-check Document.read with front-matter parsing turned on."""
    document = block_tokens.Document.read(source, front_matter=True)
    serialized = serialize_tokens(document, as_dict=True)
    data_regression.check(
        serialized, basename=f"test_doc_read_with_front_matter_{name}"
    )
# Example #12
def test_thematic_break(name, source, data_regression):
    """Regression-check the parsed token tree for thematic-break sources."""
    tokens = tokenize_main(source)
    serialized = serialize_tokens(tokens, as_dict=True)
    data_regression.check(serialized, basename=f"test_thematic_break_{name}")
# Example #13
def test_list_item(name, source, data_regression):
    """Regression-check the parsed token tree for list-item sources."""
    tokens = tokenize_main(source)
    serialized = serialize_tokens(tokens, as_dict=True)
    data_regression.check(serialized, basename=f"test_list_item_{name}")
def test_parse(data_regression):
    """Regression-check wiki-link span parsing under GithubWikiRenderer."""
    with GithubWikiRenderer():
        text = "text with [[wiki | target]]"
        tokens = tokenize_span(text)
        data_regression.check(serialize_tokens(tokens, as_dict=True))
def test_parse_with_children(data_regression):
    """Regression-check a wiki link whose alt text contains child tokens."""
    with GithubWikiRenderer():
        text = "[[*alt*|link]]"
        tokens = tokenize_span(text)
        data_regression.check(serialize_tokens(tokens, as_dict=True))