Example #1
def test_attributes_marker_in_text():
    lex = MainLexer()

    lex.process("Not [attributes]")

    assert lex.tokens == [
        Text("Not [attributes]"),
        EOL,
        EOF,
    ]
Example #2
def test_empty_header():
    lex = MainLexer()

    lex.process("=")

    assert lex.tokens == [
        Text("="),
        EOL,
        EOF,
    ]
Example #3
def test_header_markers_in_text():
    lex = MainLexer()

    lex.process("Definitely not a === header")

    assert lex.tokens == [
        Text("Definitely not a === header"),
        EOL,
        EOF,
    ]
Example #4
def test_import_directive(mock_file):
    lex = MainLexer()

    lex.process("::#include:/path/to/file")

    assert lex.tokens == [
        Text("just some data"),
        EOL,
        EOF,
    ]
Example #5
def test_comment():
    lex = MainLexer()

    lex.process("// Some comment")

    assert lex.tokens == [
        Text("// Some comment"),
        EOL,
        EOF,
    ]
Example #6
def test_variable_marker_in_text():
    lex = MainLexer()

    lex.process("Not a :variable:")

    assert lex.tokens == [
        Text("Not a :variable:"),
        EOL,
        EOF,
    ]
Example #7
def test_unlisted_header():
    lex = MainLexer()

    lex.process("==! Header")

    assert lex.tokens == [
        Literal("==!"),
        WS(" "),
        Text("Header"),
        EOL,
        EOF,
    ]
Example #8
def test_include_content_positions():
    lex = MainLexer()

    lex.process("<< type:/path/to/it.jpg")

    assert [i.position for i in lex.tokens] == [
        (0, 0),
        (0, 2),
        (0, 3),
        (0, 23),
        (1, 0),
    ]
Example #9
def test_variable_negation():
    lex = MainLexer()

    lex.process(":!variable:")

    assert lex.tokens == [
        Literal(":"),
        Text("!variable"),
        Literal(":"),
        EOL,
        EOF,
    ]
Example #10
def test_ordered_list_multiple_markers():
    lex = MainLexer()

    lex.process("### Item")

    assert lex.tokens == [
        Literal("###"),
        WS(" "),
        Text("Item"),
        EOL,
        EOF,
    ]
Example #11
def test_unordered_list_trailing_space():
    lex = MainLexer()

    lex.process("*       Item")

    assert lex.tokens == [
        Literal("*"),
        WS("       "),
        Text("Item"),
        EOL,
        EOF,
    ]
Example #12
def test_attributes():
    lex = MainLexer()

    lex.process("[name]")

    assert lex.tokens == [
        Literal("["),
        Text("name"),
        Literal("]"),
        EOL,
        EOF,
    ]
Example #13
def test_multiple_header_markers():
    lex = MainLexer()

    lex.process("=== Header")

    assert lex.tokens == [
        Literal("==="),
        WS(" "),
        Text("Header"),
        EOL,
        EOF,
    ]
Example #14
def test_include_content_multiple_spaces_after_mark():
    lex = MainLexer()

    lex.process("<<      type:/path/to/it.jpg")

    assert lex.tokens == [
        Literal("<<"),
        WS("      "),
        Text("type:/path/to/it.jpg"),
        EOL,
        EOF,
    ]
Example #15
def test_header_marker_in_header_text():
    lex = MainLexer()

    lex.process("= a=b")

    assert lex.tokens == [
        Literal("="),
        WS(" "),
        Text("a=b"),
        EOL,
        EOF,
    ]
Example #16
def test_command_without_arguments():
    lex = MainLexer()

    lex.process("::command:")

    assert lex.tokens == [
        Literal("::"),
        Text("command"),
        Literal(":"),
        EOL,
        EOF,
    ]
Example #17
def test_command():
    lex = MainLexer()

    lex.process("::command:arg0,arg1")

    assert lex.tokens == [
        Literal("::"),
        Text("command"),
        Literal(":"),
        Text("arg0,arg1"),
        EOL,
        EOF,
    ]
Example #18
def test_variable_definition():
    lex = MainLexer()

    lex.process(":variable:value123")

    assert lex.tokens == [
        Literal(":"),
        Text("variable"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
Example #19
def test_variable_definition_accepted_characters():
    lex = MainLexer()

    lex.process(":abcAB.C0123-_:value123")

    assert lex.tokens == [
        Literal(":"),
        Text("abcAB.C0123-_"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
Example #20
def test_directive_escape(mock_process_directive):
    lex = MainLexer()

    lex.process("::\\#name:/path/to/file")

    assert not mock_process_directive.called

    assert lex.tokens == [
        Literal("::"),
        Text("#name"),
        Literal(":"),
        Text("/path/to/file"),
        EOL,
        EOF,
    ]
Example #21
def test_include_content_with_arguments():
    lex = MainLexer()

    lex.process("<< type:/path/to/it.jpg(value1,argument2=value2)")

    assert lex.tokens == [
        Literal("<<"),
        WS(" "),
        Text("type:/path/to/it.jpg"),
        Literal("("),
        Text("value1,argument2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
Example #22
def test_title():
    lex = MainLexer()

    lex.process(
        dedent("""
            .A title
            Some text
            """))

    assert lex.tokens == [
        Literal("."),
        Text("A title"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
Example #23
def test_id():
    lex = MainLexer()

    lex.process(
        dedent("""
            #someid
            Some text
            """))

    assert lex.tokens == [
        Literal("#"),
        Text("someid"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
Example #24
def test_title_multiple_spaces_after_mark():
    lex = MainLexer()

    lex.process(
        dedent("""
            .     A title with spaces
            Some text
            """))

    assert lex.tokens == [
        Literal("."),
        WS("     "),
        Text("A title with spaces"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
Example #25
def test_multiple_lines_positions():
    lex = MainLexer()

    lex.process(
        dedent("""
            This is text
            split into multiple lines

            with an empty line
            """))

    assert [i.position for i in lex.tokens] == [
        (0, 0),
        (0, 12),
        (1, 0),
        (1, 25),
        (2, 0),
        (3, 0),
        (3, 18),
        (4, 0),
    ]
Example #26
def test_multiple_lines():
    lex = MainLexer()

    lex.process(
        dedent("""
            This is text
            split into multiple lines

            with an empty line
            """))

    assert lex.tokens == [
        Text("This is text"),
        EOL,
        Text("split into multiple lines"),
        EOL,
        EOL,
        Text("with an empty line"),
        EOL,
        EOF,
    ]
Example #27
    def __init__(self, variables=None):
        super().__init__()

        self.lexer = MainLexer()

        self.variables = copy.deepcopy(variables) if variables else {}
        self.headers = []
        self.footnotes = []
        self.blocks = {}
        self.toc = []

        self._args = []
        self._kwargs = {}
        self._title = None
Example #28
def test_multiline_comment():
    lex = MainLexer()

    lex.process(
        dedent("""
            ////
            Some comment

               another line
            ////
            """))

    assert lex.tokens == [
        Literal("////"),
        EOL,
        Text("Some comment"),
        EOL,
        EOL,
        Text("   another line"),
        EOL,
        Literal("////"),
        EOL,
        EOF,
    ]
Example #29
    def __init__(self, variables=None):
        super().__init__()

        self.lexer = MainLexer()

        self.variables = copy.deepcopy(variables) if variables else {}
        self.headers = []
        self.footnotes = []
        self.blocks = {}
        self.toc = []

        self._args = []
        self._kwargs = {}
        self._title = None

        self.header_anchor = self.variables.get("mau.header_anchor_function",
                                                header_anchor)
Example #30
def test_horizontal_rule():
    lex = MainLexer()

    lex.process("---")

    assert lex.tokens == [Literal("---"), EOL, EOF]
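
All of the examples above follow the same pattern: create a MainLexer, feed it a string through process(), and inspect the resulting token list. A minimal standalone sketch of that flow, using only the attributes shown in these examples; the import path is an assumption and may differ in your checkout:

from mau.lexers.main_lexer import MainLexer  # assumed module path

lex = MainLexer()
lex.process("= Header\nSome text")

for token in lex.tokens:
    # as the position-based tests above rely on, every token carries
    # a (line, column) position
    print(token, token.position)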