def test_single_named_argument():
    """A key=value pair is tokenized as text, '=' literal, text."""
    lexer = ArgumentsLexer()
    lexer.process("argument1=value1")
    expected = [
        Text("argument1"),
        Literal("="),
        Text("value1"),
    ]
    assert lexer.tokens == expected
def test_multiple_unnamed_arguments():
    """Comma-separated values produce text, comma literal, whitespace, text."""
    lexer = ArgumentsLexer()
    lexer.process("value1, value2")
    expected = [
        Text("value1"),
        Literal(","),
        WS(" "),
        Text("value2"),
    ]
    assert lexer.tokens == expected
def test_spaces():
    """A space inside the arguments string becomes a WS token."""
    lexer = ArgumentsLexer()
    lexer.process("argument1=value1 value2")
    expected = [
        Text("argument1"),
        Literal("="),
        Text("value1"),
        WS(" "),
        Text("value2"),
    ]
    assert lexer.tokens == expected
def test_collect_join_with_different_joiner():
    """collect_join accepts a custom joining string as its second argument."""
    parser = init_parser("\n")
    parser.tokens = [
        Text("De-do-do"),
        Text("do"),
        EOL,
        EOL,
    ]
    assert parser.collect_join([EOF], "-") == "De-do-do-do"
def test_text_lexer_normal_text():
    """Plain words are split into Text tokens with the separating space kept.

    NOTE(review): renamed from ``test_normal_text`` — a later function in this
    file (the PreprocessVariablesLexer test) reused that exact name, so this
    definition was shadowed and never collected or run by pytest. The rename
    makes both tests live again.
    """
    lexer = TextLexer()
    lexer.process("Normal text")
    expected = [
        Text("Normal"),
        Text(" "),
        Text("text"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_variable_definition_accepted_characters():
    """Variable names may contain letters, digits, '.', '-' and '_'."""
    lexer = MainLexer()
    lexer.process(":abcAB.C0123-_:value123")
    expected = [
        Literal(":"),
        Text("abcAB.C0123-_"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_variable_definition():
    """':name:value' is tokenized as colon, name, colon, value."""
    lexer = MainLexer()
    lexer.process(":variable:value123")
    expected = [
        Literal(":"),
        Text("variable"),
        Literal(":"),
        Text("value123"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_collect_join():
    """collect_join concatenates token values up to the stop token."""
    parser = init_parser("\n")
    parser.tokens = [
        Text("Some te"),
        Text("xt that will be joined"),
        Literal("!"),
        EOL,
        EOL,
    ]
    assert parser.collect_join([EOF]) == "Some text that will be joined!"
def test_command():
    """'::name:args' is tokenized as '::', name, ':', raw arguments."""
    lexer = MainLexer()
    lexer.process("::command:arg0,arg1")
    expected = [
        Literal("::"),
        Text("command"),
        Literal(":"),
        Text("arg0,arg1"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_process_letters():
    """The base lexer emits one Text token per character, each carrying a
    (line, column) position; EOL sits past the text and EOF on the next line."""
    lexer = BaseLexer()
    lexer.process("abcd")
    assert lexer.tokens == [Text("a"), Text("b"), Text("c"), Text("d"), EOL, EOF]
    positions = [token.position for token in lexer.tokens]
    assert positions == [
        (0, 0),
        (0, 1),
        (0, 2),
        (0, 3),
        (0, 4),
        (1, 0),
    ]
def test_include_content_with_arguments():
    """'<< type:path(args)' splits into marker, WS, path, and raw arguments."""
    lexer = MainLexer()
    lexer.process("<< type:/path/to/it.jpg(value1,argument2=value2)")
    expected = [
        Literal("<<"),
        WS(" "),
        Text("type:/path/to/it.jpg"),
        Literal("("),
        Text("value1,argument2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_directive_escape(mock_process_directive):
    """An escaped '#' suppresses directive processing; the backslash is
    dropped from the resulting name token."""
    lexer = MainLexer()
    lexer.process("::\\#name:/path/to/file")
    assert not mock_process_directive.called
    expected = [
        Literal("::"),
        Text("#name"),
        Literal(":"),
        Text("/path/to/file"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_single_class():
    """'[name]#content#' yields bracketed class name plus hash-wrapped content."""
    lexer = TextLexer()
    lexer.process("[name]#content#")
    expected = [
        Literal("["),
        Text("name"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_multiple_classes():
    """Several comma-separated class names stay in a single Text token."""
    lexer = TextLexer()
    lexer.process("[name1,name2]#content#")
    expected = [
        Literal("["),
        Text("name1,name2"),
        Literal("]"),
        Literal("#"),
        Text("content"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_macro_named_attributes():
    """Named macro attributes are kept raw inside the parentheses."""
    lexer = TextLexer()
    lexer.process("[macro](attr1=value1,attr2=value2)")
    expected = [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("attr1=value1,attr2=value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_macro():
    """'[macro](args)' yields the bracketed name and raw argument text."""
    lexer = TextLexer()
    lexer.process("[macro](value1,value2)")
    expected = [
        Literal("["),
        Text("macro"),
        Literal("]"),
        Literal("("),
        Text("value1,value2"),
        Literal(")"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_escaped_quotes():
    """Backslash-escaped quotes are emitted as separate backslash and
    quote literals around the quoted word."""
    lexer = ArgumentsLexer()
    lexer.process(r"Argument \"with\" quotes")
    expected = [
        Text("Argument"),
        WS(" "),
        Literal("\\"),
        Literal('"'),
        Text("with"),
        Literal("\\"),
        Literal('"'),
        WS(" "),
        Text("quotes"),
    ]
    assert lexer.tokens == expected
def test_normal_text():
    """The preprocess lexer emits one token per character, ending with
    the newline character itself rather than an EOL/EOF pair."""
    lexer = PreprocessVariablesLexer()
    lexer.process("Some text")
    expected = [
        Text("S"),
        Text("o"),
        Text("m"),
        Text("e"),
        Text(" "),
        Text("t"),
        Text("e"),
        Text("x"),
        Text("t"),
        Text("\n"),
    ]
    assert lexer.tokens == expected
def test_title():
    """A line starting with '.' is a title; following text is plain."""
    lexer = MainLexer()
    lexer.process(
        dedent(
            """
            .A title
            Some text
            """
        )
    )
    expected = [
        Literal("."),
        Text("A title"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_class_content_with_styles():
    """Style markers ('_') inside class content are still tokenized as literals."""
    lexer = TextLexer()
    lexer.process("[name]#_content_#")
    expected = [
        Literal("["),
        Text("name"),
        Literal("]"),
        Literal("#"),
        Literal("_"),
        Text("content"),
        Literal("_"),
        Literal("#"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_id():
    """A line starting with '#' defines an id; following text is plain."""
    lexer = MainLexer()
    lexer.process(
        dedent(
            """
            #someid
            Some text
            """
        )
    )
    expected = [
        Literal("#"),
        Text("someid"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_single_unnamed_argument():
    """A lone value produces a single Text token."""
    lexer = ArgumentsLexer()
    lexer.process("value1")
    assert lexer.tokens == [Text("value1")]
def test_title_multiple_spaces_after_mark():
    """Whitespace between the title mark and the title text becomes a WS token."""
    # NOTE(review): the source formatting was mangled onto one line; a single
    # space after '.' is reproduced as found, although the test name suggests
    # the original may have contained multiple spaces — confirm upstream.
    lexer = MainLexer()
    lexer.process(
        dedent(
            """
            . A title with spaces
            Some text
            """
        )
    )
    expected = [
        Literal("."),
        WS(" "),
        Text("A title with spaces"),
        EOL,
        Text("Some text"),
        EOL,
        EOF,
    ]
    assert lexer.tokens == expected
def test_empty_header():
    """A lone '=' with no title text is plain text, not a header."""
    lexer = MainLexer()
    lexer.process("=")
    assert lexer.tokens == [
        Text("="),
        EOL,
        EOF,
    ]
def test_header_markers_in_text():
    """'===' in the middle of a line does not trigger header tokenization."""
    lexer = MainLexer()
    lexer.process("Definitely not a === header")
    assert lexer.tokens == [
        Text("Definitely not a === header"),
        EOL,
        EOF,
    ]
def test_variable_marker_in_text():
    """A ':name:' marker mid-line is treated as plain text."""
    lexer = MainLexer()
    lexer.process("Not a :variable:")
    assert lexer.tokens == [
        Text("Not a :variable:"),
        EOL,
        EOF,
    ]
def test_attributes_marker_in_text():
    """A '[...]' marker mid-line is treated as plain text."""
    lexer = MainLexer()
    lexer.process("Not [attributes]")
    assert lexer.tokens == [
        Text("Not [attributes]"),
        EOL,
        EOF,
    ]
def test_comment():
    """A '//' comment line passes through the main lexer as plain text."""
    lexer = MainLexer()
    lexer.process("// Some comment")
    assert lexer.tokens == [
        Text("// Some comment"),
        EOL,
        EOF,
    ]
def test_import_directive(mock_file):
    """The '#include' directive replaces the line with the (mocked) file
    content, which is then lexed as plain text."""
    lexer = MainLexer()
    lexer.process("::#include:/path/to/file")
    assert lexer.tokens == [
        Text("just some data"),
        EOL,
        EOF,
    ]
def test_variable_negation():
    """A leading '!' is kept as part of the variable name token."""
    lexer = MainLexer()
    lexer.process(":!variable:")
    assert lexer.tokens == [
        Literal(":"),
        Text("!variable"),
        Literal(":"),
        EOL,
        EOF,
    ]