Esempio n. 1
0
def test_normal_text():
    """Plain words are lexed as Text tokens, spaces included, ending in EOL/EOF."""
    lexer = TextLexer()
    lexer.process("Normal text")

    expected = [
        Text("Normal"),
        Text(" "),
        Text("text"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 2
0
def test_backtick():
    """Backticks are emitted as Literal tokens around the enclosed Text."""
    lexer = TextLexer()
    lexer.process("`backtick`")

    expected = [
        Literal("`"),
        Text("backtick"),
        Literal("`"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 3
0
def test_round_brackets():
    """Round brackets become Literal tokens surrounding the inner Text."""
    lexer = TextLexer()
    lexer.process("(round)")

    expected = [
        Literal("("),
        Text("round"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 4
0
def test_star():
    """Stars (bold markers) are emitted as Literal tokens around the word."""
    lexer = TextLexer()
    lexer.process("*star*")

    expected = [
        Literal("*"),
        Text("star"),
        Literal("*"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 5
0
def test_underscore():
    """Underscores (italic markers) are emitted as Literal tokens around the word."""
    lexer = TextLexer()
    lexer.process("_underscore_")

    expected = [
        Literal("_"),
        Text("underscore"),
        Literal("_"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 6
0
def test_curly_braces():
    """Curly braces become Literal tokens surrounding the inner Text."""
    lexer = TextLexer()
    lexer.process("{curly}")

    expected = [
        Literal("{"),
        Text("curly"),
        Literal("}"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 7
0
def test_square_brackets():
    """Square brackets become Literal tokens surrounding the inner Text."""
    lexer = TextLexer()
    lexer.process("[square]")

    expected = [
        Literal("["),
        Text("square"),
        Literal("]"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 8
0
def test_escaped_quotes():
    """Backslash-escaped quotes are lexed as separate backslash and quote Literals."""
    lexer = TextLexer()
    lexer.process(r"\"quotes\"")

    expected = [
        Literal("\\"),
        Literal('"'),
        Text("quotes"),
        Literal("\\"),
        Literal('"'),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 9
0
def test_escaped_curly_braces():
    """Backslash-escaped curly braces are lexed as backslash + brace Literal pairs."""
    lex = TextLexer()

    lex.process(r"\{curly\}")

    # The backslash and the brace are each emitted as a separate Literal
    # token (the previous comment claiming they become TEXT tokens
    # contradicted the assertion below).
    assert lex.tokens == [
        Literal("\\"),
        Literal("{"),
        Text("curly"),
        Literal("\\"),
        Literal("}"),
        EOL,
        EOF,
    ]
Esempio n. 10
0
def test_single_class():
    """A class span `[name]#content#` splits into bracket, hash, and text tokens."""
    lexer = TextLexer()
    lexer.process("[name]#content#")

    expected = [
        Literal("["),
        Text("name"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 11
0
def test_escaped_square_brackets():
    """Backslash-escaped square brackets are lexed as backslash + bracket Literal pairs."""
    lex = TextLexer()

    lex.process(r"\[square\]")

    # The backslash and the bracket are each emitted as a separate Literal
    # token (the previous comment claiming they become TEXT tokens
    # contradicted the assertion below).
    assert lex.tokens == [
        Literal("\\"),
        Literal("["),
        Text("square"),
        Literal("\\"),
        Literal("]"),
        EOL,
        EOF,
    ]
Esempio n. 12
0
def test_escaped_underscore():
    """Backslash-escaped underscores are lexed as backslash + underscore Literal pairs."""
    lex = TextLexer()

    lex.process(r"\_underscore\_")

    # The backslash and the underscore are each emitted as a separate
    # Literal token (the previous comment claiming they become TEXT tokens
    # contradicted the assertion below).
    assert lex.tokens == [
        Literal("\\"),
        Literal("_"),
        Text("underscore"),
        Literal("\\"),
        Literal("_"),
        EOL,
        EOF,
    ]
Esempio n. 13
0
def test_macro_named_attributes():
    """A macro's named-attribute list is kept as a single Text token."""
    lexer = TextLexer()
    lexer.process("[macro](attr1=value1,attr2=value2)")

    expected = [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("attr1=value1,attr2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 14
0
def test_macro():
    """A macro's positional-argument list is kept as a single Text token."""
    lexer = TextLexer()
    lexer.process("[macro](value1,value2)")

    expected = [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("value1,value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 15
0
def test_multiple_classes():
    """A comma-separated class list stays a single Text token inside brackets."""
    lexer = TextLexer()
    lexer.process("[name1,name2]#content#")

    expected = [
        Literal("["),
        Text("name1,name2"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 16
0
def test_escaped_round_brackets():
    """Backslash-escaped round brackets are lexed as backslash + bracket Literal pairs."""
    lex = TextLexer()

    lex.process(r"\(round\)")

    # The backslash and the bracket are each emitted as a separate Literal
    # token (the previous comment claiming they become TEXT tokens
    # contradicted the assertion below).
    assert lex.tokens == [
        Literal("\\"),
        Literal("("),
        Text("round"),
        Literal("\\"),
        Literal(")"),
        EOL,
        EOF,
    ]
Esempio n. 17
0
def test_class_content_with_styles():
    """Style markers inside a class span's content are still lexed as Literals."""
    lexer = TextLexer()
    lexer.process("[name]#_content_#")

    expected = [
        Literal("["),
        Text("name"),
        Literal("]"),
        Literal("#"),
        Literal("_"),
        Text("content"),
        Literal("_"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
Esempio n. 18
0
    def __init__(self, footnotes_start_with=1):
        """Initialize parser state.

        Args:
            footnotes_start_with: number assigned to the first footnote
                (defaults to 1).
        """
        # The enclosing class header is outside this chunk, so the base
        # class initialized here is not visible from this view.
        super().__init__()

        # Lexer used to tokenize the text being processed.
        self.lexer = TextLexer()

        # Sets of styles/classes tracked during parsing; _verbatim is a
        # mode flag. NOTE(review): exact semantics (e.g. "currently open
        # styles") are not visible in this chunk — confirm against the
        # rest of the class.
        self._styles = set()
        self._classes = set()
        self._verbatim = False

        # Footnote numbering start and the collected footnotes.
        self.footnotes_start_with = footnotes_start_with
        self.footnotes = []
        # _nodes is presumably an internal working buffer and nodes the
        # public result list — TODO confirm against the methods that use them.
        self._nodes = []
        self.nodes = []