def test_parse_object_macro():
    """An object-like `#define` parses to an ObjectMacro carrying the value tokens."""
    tokens = [t for t in tokenize_line("#define DRUG_NUMBER 420") if t.type is not TokenType.WHITESPACE]
    actual = parse_line(tokens)
    assert isinstance(actual, ObjectMacro)
    assert actual.identifier == "DRUG_NUMBER"
    # token values are regex match objects; .group() yields the matched text
    assert [t.value.group() for t in actual.tokens] == ["420"]
def test_parse_function_macro():
    """A function-like `#define` parses to a FunctionMacro with params and body tokens."""
    tokens = [t for t in tokenize_line("#define A(B, C, D, E, F, G) B + C") if t.type is not TokenType.WHITESPACE]
    actual = parse_line(tokens)
    assert isinstance(actual, FunctionMacro)
    assert actual.identifier == "A"
    assert actual.params == ("B", "C", "D", "E", "F", "G")
    # the replacement expression keeps only the body tokens, in source order
    assert [t.value.group() for t in actual.expression] == ["B", "+", "C"]
def test_tokenize_line():
    """Tokenizing a `#define` line yields the exact (type, text) sequence.

    The original comparison used a bare zip(), which silently stops at the
    shorter sequence — a tokenizer returning too few (or too many) tokens
    would still pass. Assert the lengths match before comparing pairwise.
    """
    actual = tokenize_line("#define uwu 420")
    expected = [
        (TokenType.DIRECTIVE, "#define"),
        (TokenType.WHITESPACE, " "),
        (TokenType.IDENTIFIER, "uwu"),
        (TokenType.WHITESPACE, " "),
        (TokenType.NUM_LITERAL, "420"),
    ]
    assert len(actual) == len(expected)  # guard against vacuous zip truncation
    for token, (exp_type, exp_text) in zip(actual, expected):
        assert token.type == exp_type
        assert token.value.group(0) == exp_text
def test_parse_if_directive(line, expected):
    """Parametrized: a conditional directive parses to the expected directive/expression.

    `line` is the raw source line; `expected` carries the directive kind and,
    optionally, the expected expression token texts.
    """
    tokens = [t for t in tokenize_line(line) if t.type is not TokenType.WHITESPACE]
    actual = parse_line(tokens)
    assert actual.directive == expected.directive
    if expected.expression:
        assert [t.value.group() for t in actual.expression] == expected.expression
    else:
        # directives like #else/#endif carry no expression at all
        assert not actual.expression
def test_single_tokens(input_str, expected_type):
    """Parametrized: an input that is exactly one token gets the expected type."""
    tokens = tokenize_line(input_str)
    assert len(tokens) == 1
    assert tokens[0].type == expected_type
def test_parse_include(line, expected):
    """Parametrized: an `#include` line parses to the expected object (via __eq__)."""
    significant = [t for t in tokenize_line(line) if t.type is not TokenType.WHITESPACE]
    assert parse_line(significant) == expected