def test_parse_token_combinator():
    """A bare token name (comb := NAME) parses into a TokenCombinator."""
    # comb := NAME
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, 'Name')
    assert isinstance(result, TokenCombinator)
    # The combinator must reference the token registered above.
    assert result.token_id == token_id
def test_parse_repeat_combinator():
    """Braces (comb := '{' seq '}') parse into a RepeatCombinator wrapping the inner sequence."""
    # comb := '{' seq '}'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '{ Name }')
    assert isinstance(result, RepeatCombinator)
    # The repeated element is the single token combinator for 'Name'.
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
def test_parse_optional_combinator():
    """Brackets (comb := '[' seq ']') parse into an OptionalCombinator wrapping the inner sequence."""
    # comb := '[' seq ']'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '[ Name ]')
    assert isinstance(result, OptionalCombinator)
    # The optional element is the single token combinator for 'Name'.
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
def test_parse_token_priority_fail_combinator():
    """A priority annotation on a plain token (comb := NAME) is rejected with DiagnosticError."""
    # comb := NAME — a '<100>' priority suffix is not valid here.
    grammar = Grammar()
    grammar.add_token('Name')
    with pytest.raises(DiagnosticError):
        make_combinator(grammar, 'Name <100>')