Example #1
def test_parse_existed_implicit_combinator():
    # comb := STRING
    grammar = Grammar()
    token_id = grammar.add_implicit('(')
    result = make_combinator(grammar, '"("')
    assert isinstance(result, TokenCombinator)
    assert result.token_id == token_id
Example #2
def test_parse_token_combinator():
    # comb := NAME
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, 'Name')
    assert isinstance(result, TokenCombinator)
    assert result.token_id == token_id
Example #3
File: grammar.py  Project: alurin/gvm
    def add_parser(self, parser_id: Union[str, ParseletID], combinator: Union[Combinator, str, SymbolID],
                   generator: ActionGenerator = None, *, priority: int = PRIORITY_MAX, location: Location = None) \
            -> ParseletID:
        location = location or py_location(2)
        # convert input combinator to instance of Combinator
        if isinstance(combinator, str):
            from gvm.language.helpers import make_combinator
            combinator = make_combinator(self, combinator, location)
        else:
            combinator = flat_combinator(combinator)

        # build the parse action from the generator and the combinator
        generator = generator or make_return_result()
        action = generator(combinator)

        # resolve parser_id: a string creates a new parselet, an existing ParseletID is validated
        if isinstance(parser_id, str):
            parser_id = self.add_parselet(parser_id,
                                          location=location,
                                          result_type=action.result_type)
        else:
            # check that the action's result type matches the parselet's declared result type
            if not is_subclass(action.result_type, parser_id.result_type):
                raise GrammarError(
                    location,
                    f'Can not add parser to parselet because return types are different: '
                    f'{action.result_type} and {parser_id.result_type}')

        # add parser to the parselet's table
        self.tables[parser_id].add_parser(combinator, action, priority,
                                          location)
        return parser_id
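
A minimal usage sketch of add_parser based on the examples on this page; the import path for Grammar, the rule name 'expr', and the chosen combinator string are illustrative assumptions, not taken from the project:

# Sketch only: the Grammar import path is assumed (only gvm.language.helpers
# for make_combinator is confirmed by the source above).
from gvm.language.grammar import Grammar

grammar = Grammar()
grammar.add_token('Name')

# With a string parser_id and a string combinator, add_parser runs the string
# through make_combinator and creates the 'expr' parselet automatically.
expr_id = grammar.add_parser('expr', '[ Name ]')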
Example #4
def test_parse_repeat_combinator():
    # comb := '{' seq '}'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '{ Name }')
    assert isinstance(result, RepeatCombinator)
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
Example #5
def test_parse_optional_combinator():
    # comb := '[' seq ']'
    grammar = Grammar()
    token_id = grammar.add_token('Name')
    result = make_combinator(grammar, '[ Name ]')
    assert isinstance(result, OptionalCombinator)
    assert isinstance(result.combinator, TokenCombinator)
    assert result.combinator.token_id == token_id
Example #6
def test_parse_parselet_with_priority_combinator():
    # comb := NAME
    grammar = Grammar()
    parser_id = grammar.add_parselet('name')
    result = make_combinator(grammar, 'name <100>')
    assert isinstance(result, ParseletCombinator)
    assert result.parser_id == parser_id
    assert result.priority == 100
Example #7
def test_parse_token_priority_fail_combinator():
    # comb := NAME
    grammar = Grammar()
    grammar.add_token('Name')
    with pytest.raises(DiagnosticError):
        make_combinator(grammar, 'Name <100>')