Example #1
def test_parse_existed_implicit_combinator():
    # comb := STRING
    grammar = Grammar()
    token_id = grammar.add_implicit('(')
    result = make_combinator(grammar, '"("')
    assert isinstance(result, TokenCombinator)
    assert result.token_id == token_id
Example #2
def test_parse_token_combinator():
    # comb := NAME
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, 'Name')
    assert isinstance(result, TokenCombinator)
    assert result.token_id == token_id
Example #3
def test_parse_repeat_combinator():
    # comb := '{' seq '}'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '{ Name }')
    assert isinstance(result, RepeatCombinator)
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
Example #4
def test_parse_optional_combinator():
    # comb := '[' seq ']'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '[ Name ]')
    assert isinstance(result, OptionalCombinator)
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
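
The rule comments in Examples 3 and 4 both contain seq, which suggests the repeat and optional wrappers can be nested. The following is a minimal sketch of such a test, assuming make_combinator accepts nested notation; the test name and input string are hypothetical and not taken from the gvm test suite:

def test_parse_nested_optional_repeat_combinator():
    # comb := '[' seq ']' wrapping '{' seq '}'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '[ { Name } ]')
    assert isinstance(result, OptionalCombinator)
    assert isinstance(result.combinator, RepeatCombinator)
    assert isinstance(result.combinator.combinator, TokenCombinator)
    assert result.combinator.combinator.token_id == token_id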
Example #5
def test_parse_parselet_with_priority_combinator():
    # comb := NAME
    grammar = Grammar()
    parser_id = grammar.add_parselet('name')
    result = make_combinator(grammar, 'name <100>')
    assert isinstance(result, ParseletCombinator)
    assert result.parser_id == parser_id
    assert result.priority == 100
Example #6
File: gvm.py Project: alurin/gvm
def main():
    grammar = Grammar()

    # core grammar
    grammar.extend(create_core_grammar())

    # combinator grammars
    grammar.extend(create_combinator_grammar())

    # add macro grammar. e.g. expr
    grammar.add_parser('macro',
                       'name:Name "::=" combinator: combinator_sequence')

    # # dump grammar
    # dump_grammar(sys.stdout, grammar)

    content = """
expr ::= 1 2 3 ; |

def main(): pass
""".strip()

    #
    scanner = DefaultScanner(grammar, '<example>', content)
    parser = Parser(scanner)
    try:
        parser.parse(grammar.parselets['macro'])
    except ParserError as ex:
        ex.to_stream(create_writer(sys.stderr), content)
        exit(1)
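
The content string above (with '1 2 3 ; |' and a stray def line) looks deliberately malformed, so the ParserError handler can demonstrate the diagnostic output. For contrast, a well-formed input might look like the sketch below, assuming the combinator_sequence parselet accepts the same notation that make_combinator handles in the test examples; the input string is hypothetical and not verified against the gvm grammars:

# inside main(), with the same grammar built above
content = 'expr ::= "(" [ Name ] { Name } ")"'

scanner = DefaultScanner(grammar, '<example>', content)
parser = Parser(scanner)
parser.parse(grammar.parselets['macro'])  # expected to succeed if the assumptions hold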
Example #7
def test_parse_token_priority_fail_combinator():
    # comb := NAME
    grammar = Grammar()
    grammar.add_token('Name')
    with pytest.raises(DiagnosticError):
        make_combinator(grammar, 'Name <100>')