def test_preserve_escapes():
    """The preprocess lexer keeps escape backslashes as Literal tokens."""
    lexer = PreprocessVariablesLexer()
    lexer.process(r"Normal \text \_other\_")

    expected = [Text(char) for char in "Normal "]
    expected.append(Literal("\\"))
    expected.extend(Text(char) for char in "text ")
    expected.append(Literal("\\"))
    expected.append(Text("_"))
    expected.extend(Text(char) for char in "other")
    expected.append(Literal("\\"))
    expected.append(Text("_"))
    expected.append(Text("\n"))

    assert lexer.tokens == expected
def test_put_token():
    """A token pushed back with put_token is returned by the next get_token."""
    parser = init_parser("\n")

    assert parser.get_token() == EOL

    parser.put_token(Literal("*"))

    assert parser.get_token() == Literal("*")
    assert parser.get_token() == EOL
    assert parser.get_token() == EOF
def test_variable_negation():
    """A leading ``!`` stays attached to the variable name text token."""
    lexer = MainLexer()
    lexer.process(":!variable:")

    assert lexer.tokens == [
        Literal(":"),
        Text("!variable"),
        Literal(":"),
        EOL,
        EOF,
    ]
def test_attributes():
    """Square brackets around a name are emitted as Literal delimiters."""
    lexer = MainLexer()
    lexer.process("[name]")

    assert lexer.tokens == [
        Literal("["),
        Text("name"),
        Literal("]"),
        EOL,
        EOF,
    ]
def test_backtick():
    """Backticks delimit the enclosed text as Literal tokens."""
    lexer = TextLexer()
    lexer.process("`backtick`")

    assert lexer.tokens == [
        Literal("`"),
        Text("backtick"),
        Literal("`"),
        EOL,
        EOF,
    ]
def test_command_without_arguments():
    """A ``::name:`` command with no arguments ends after the colon."""
    lexer = MainLexer()
    lexer.process("::command:")

    assert lexer.tokens == [
        Literal("::"),
        Text("command"),
        Literal(":"),
        EOL,
        EOF,
    ]
def test_star():
    """Stars delimit the enclosed text as Literal tokens."""
    lexer = TextLexer()
    lexer.process("*star*")

    assert lexer.tokens == [
        Literal("*"),
        Text("star"),
        Literal("*"),
        EOL,
        EOF,
    ]
def test_square_brackets():
    """Square brackets in text are emitted as Literal delimiters."""
    lexer = TextLexer()
    lexer.process("[square]")

    assert lexer.tokens == [
        Literal("["),
        Text("square"),
        Literal("]"),
        EOL,
        EOF,
    ]
def test_curly_braces():
    """Curly braces in text are emitted as Literal delimiters."""
    lexer = TextLexer()
    lexer.process("{curly}")

    assert lexer.tokens == [
        Literal("{"),
        Text("curly"),
        Literal("}"),
        EOL,
        EOF,
    ]
def test_round_brackets():
    """Round brackets in text are emitted as Literal delimiters."""
    lexer = TextLexer()
    lexer.process("(round)")

    assert lexer.tokens == [
        Literal("("),
        Text("round"),
        Literal(")"),
        EOL,
        EOF,
    ]
def test_underscore():
    """Underscores delimit the enclosed text as Literal tokens."""
    lexer = TextLexer()
    lexer.process("_underscore_")

    assert lexer.tokens == [
        Literal("_"),
        Text("underscore"),
        Literal("_"),
        EOL,
        EOF,
    ]
def test_command():
    """A command's arguments after the colon become a single Text token."""
    lexer = MainLexer()
    lexer.process("::command:arg0,arg1")

    assert lexer.tokens == [
        Literal("::"),
        Text("command"),
        Literal(":"),
        Text("arg0,arg1"),
        EOL,
        EOF,
    ]
def test_variable_definition_accepted_characters():
    """Variable names may mix letters, digits, dots, dashes and underscores."""
    lexer = MainLexer()
    lexer.process(":abcAB.C0123-_:value123")

    assert lexer.tokens == [
        Literal(":"),
        Text("abcAB.C0123-_"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
def test_variable_definition():
    """``:name:value`` splits into colon Literals, name and value Texts."""
    lexer = MainLexer()
    lexer.process(":variable:value123")

    assert lexer.tokens == [
        Literal(":"),
        Text("variable"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
def test_escaped_quotes_text_lexer():
    """TextLexer: escaped quotes keep backslash and quote as Literal tokens.

    Renamed from ``test_escaped_quotes`` — this module defined two tests with
    that name (one for TextLexer, one for ArgumentsLexer), so the earlier
    definition was shadowed and silently never ran.
    """
    lexer = TextLexer()
    lexer.process(r"\"quotes\"")

    assert lexer.tokens == [
        Literal("\\"),
        Literal('"'),
        Text("quotes"),
        Literal("\\"),
        Literal('"'),
        EOL,
        EOF,
    ]
def test_single_class():
    """``[name]#content#`` tokenizes the class name and its content."""
    lexer = TextLexer()
    lexer.process("[name]#content#")

    assert lexer.tokens == [
        Literal("["),
        Text("name"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
def test_macro():
    """A macro call keeps its argument list as a single Text token."""
    lexer = TextLexer()
    lexer.process("[macro](value1,value2)")

    assert lexer.tokens == [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("value1,value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
def test_include_content_with_arguments():
    """``<<`` content includes keep the URI and arguments as Text tokens."""
    lexer = MainLexer()
    lexer.process("<< type:/path/to/it.jpg(value1,argument2=value2)")

    assert lexer.tokens == [
        Literal("<<"),
        WS(" "),
        Text("type:/path/to/it.jpg"),
        Literal("("),
        Text("value1,argument2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
def test_escaped_round_brackets():
    """Escaped round brackets keep backslash and bracket as Literal tokens.

    The old inline comment claimed escaped characters become TEXT tokens,
    which contradicted the assertions below (they are Literal tokens); the
    comment was removed, the asserted behavior is unchanged.
    """
    lexer = TextLexer()
    lexer.process(r"\(round\)")

    assert lexer.tokens == [
        Literal("\\"),
        Literal("("),
        Text("round"),
        Literal("\\"),
        Literal(")"),
        EOL,
        EOF,
    ]
def test_escaped_square_brackets():
    """Escaped square brackets keep backslash and bracket as Literal tokens.

    The old inline comment claimed escaped characters become TEXT tokens,
    which contradicted the assertions below (they are Literal tokens); the
    comment was removed, the asserted behavior is unchanged.
    """
    lexer = TextLexer()
    lexer.process(r"\[square\]")

    assert lexer.tokens == [
        Literal("\\"),
        Literal("["),
        Text("square"),
        Literal("\\"),
        Literal("]"),
        EOL,
        EOF,
    ]
def test_escaped_underscore():
    """Escaped underscores keep backslash and underscore as Literal tokens.

    The old inline comment claimed escaped characters become TEXT tokens,
    which contradicted the assertions below (they are Literal tokens); the
    comment was removed, the asserted behavior is unchanged.
    """
    lexer = TextLexer()
    lexer.process(r"\_underscore\_")

    assert lexer.tokens == [
        Literal("\\"),
        Literal("_"),
        Text("underscore"),
        Literal("\\"),
        Literal("_"),
        EOL,
        EOF,
    ]
def test_macro_named_attributes():
    """Named macro attributes stay inside a single Text token."""
    lexer = TextLexer()
    lexer.process("[macro](attr1=value1,attr2=value2)")

    assert lexer.tokens == [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("attr1=value1,attr2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
def test_escaped_curly_braces():
    """Escaped curly braces keep backslash and brace as Literal tokens.

    The old inline comment claimed escaped characters become TEXT tokens,
    which contradicted the assertions below (they are Literal tokens); the
    comment was removed, the asserted behavior is unchanged.
    """
    lexer = TextLexer()
    lexer.process(r"\{curly\}")

    assert lexer.tokens == [
        Literal("\\"),
        Literal("{"),
        Text("curly"),
        Literal("\\"),
        Literal("}"),
        EOL,
        EOF,
    ]
def test_multiple_classes():
    """Comma-separated class names remain a single Text token."""
    lexer = TextLexer()
    lexer.process("[name1,name2]#content#")

    assert lexer.tokens == [
        Literal("["),
        Text("name1,name2"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
def test_multiple_named_arguments():
    """Multiple named arguments split on ``=``, ``,`` and whitespace."""
    lexer = ArgumentsLexer()
    lexer.process("argument1=value1, argument2=value2")

    assert lexer.tokens == [
        Text("argument1"),
        Literal("="),
        Text("value1"),
        Literal(","),
        WS(" "),
        Text("argument2"),
        Literal("="),
        Text("value2"),
    ]
def test_directive_escape(mock_process_directive):
    """An escaped ``#`` prevents the directive machinery from firing."""
    lexer = MainLexer()
    lexer.process("::\\#name:/path/to/file")

    # The escape must stop the command from being treated as a directive.
    assert not mock_process_directive.called
    assert lexer.tokens == [
        Literal("::"),
        Text("#name"),
        Literal(":"),
        Text("/path/to/file"),
        EOL,
        EOF,
    ]
def test_escaped_quotes_arguments_lexer():
    """ArgumentsLexer: escaped quotes keep backslash and quote as Literals.

    Renamed from ``test_escaped_quotes`` — this module defined two tests with
    that name (one for TextLexer, one for ArgumentsLexer), so the earlier
    definition was shadowed and silently never ran.
    """
    lexer = ArgumentsLexer()
    lexer.process(r"Argument \"with\" quotes")

    assert lexer.tokens == [
        Text("Argument"),
        WS(" "),
        Literal("\\"),
        Literal('"'),
        Text("with"),
        Literal("\\"),
        Literal('"'),
        WS(" "),
        Text("quotes"),
    ]
def test_match_only_backticks_and_curly_braces():
    """Only backticks and curly braces become Literals; ``_`` and ``*`` stay Text."""
    lexer = PreprocessVariablesLexer()
    lexer.process("Normal text `{curly}` _other_ *text*")

    expected = [Text(char) for char in "Normal text "]
    expected += [Literal("`"), Literal("{")]
    expected += [Text(char) for char in "curly"]
    expected += [Literal("}"), Literal("`")]
    expected += [Text(char) for char in " _other_ *text*\n"]

    assert lexer.tokens == expected
def test_escape_curly_braces():
    """Escaped curly braces keep the backslashes as Literal tokens."""
    lexer = PreprocessVariablesLexer()
    lexer.process(r"Normal text \{curly\} _other_ *text*")

    expected = [Text(char) for char in "Normal text "]
    expected += [Literal("\\"), Literal("{")]
    expected += [Text(char) for char in "curly"]
    expected += [Literal("\\"), Literal("}")]
    expected += [Text(char) for char in " _other_ *text*\n"]

    assert lexer.tokens == expected
def test_single_named_argument():
    """A single ``name=value`` argument splits on the equals sign."""
    lexer = ArgumentsLexer()
    lexer.process("argument1=value1")

    assert lexer.tokens == [
        Text("argument1"),
        Literal("="),
        Text("value1"),
    ]