def test_type_errors():
    """Accessing `.fact` must raise TypeError when the interpretation
    pipeline produces a value whose type conflicts with the target.

    Case 1: a custom(int) result assigned to a fact attribute.
    Case 2: custom(int) chained into custom(str) at the rule level.
    """
    # Case 1: int value flowing into an attribute interpretation.
    F = fact('F', ['a'])
    number = eq('1').interpretation(custom(int))
    RULE = rule('a', number).interpretation(F.a)
    match = Parser(RULE).match('a 1')
    with pytest.raises(TypeError):
        match.fact

    # Case 2: int value flowing into a second rule-level custom.
    F = fact('F', ['a'])
    number = eq('1').interpretation(custom(int))
    RULE = rule('a', number).interpretation(custom(str))
    match = Parser(RULE).match('a 1')
    with pytest.raises(TypeError):
        match.fact
def test_checks():
    """Predicate activation validates its arguments against the context.

    Unknown grammemes and unknown custom `types` names both raise
    ValueError when activated.
    """
    context = Context(MorphTokenizer())
    with pytest.raises(ValueError):
        gram('UNK').activate(context)
    with pytest.raises(ValueError):
        custom(lambda _: True, types='UNK').activate(context)
def test_rule_custom():
    """A rule-level custom(float) converts the whole matched span."""
    pi = rule('3', '.', '14').interpretation(custom(float))
    result = Parser(pi).match('3.14')
    assert result.fact == 3.14
def test_rule_custom_custom():
    """Chained custom() calls compose: lowercase first, then dict lookup."""
    lookup = {'a': 1}
    letter = rule('A').interpretation(
        custom(str.lower).custom(lookup.get)
    )
    result = Parser(letter).match('A')
    assert result.fact == 1
def test_rule_attribute_custom():
    """An attribute interpretation followed by custom(int) yields the int."""
    F = fact('F', ['a'])
    digit = rule('1').interpretation(F.a).interpretation(custom(int))
    result = Parser(digit).match('1')
    assert result.fact == 1
def test_rule_custom_attribute():
    """custom(int) -> attribute -> fact builds a full record with spans/json."""
    F = fact('F', ['a'])
    digit = (
        rule('1')
        .interpretation(custom(int))
        .interpretation(F.a)
        .interpretation(F)
    )
    record = Parser(digit).match('1').fact
    assert record == F(a=1)
    assert record.spans == [(0, 1)]
    assert record.as_json == {'a': 1}