Example #1
def registry_infer_exts():
    extensions.register_parts(
        '*',
        {
            'advmod': lambda c, t: (4, "extract_for('word', 'advmod')"),
            'det': lambda c, t: (4, "extract_for('plain', 'det')"),
            'obl': lambda c, t: (4, "extract_for('chunk', 'verb:obl')"),
            'cop': lambda c, t: (2, "cop='c_aux'"),
            'head_amod': lambda c, t: (2, "head_amod=interr('what')"),

            # $ spt 'A cobra fala com o menino.'  ("The snake talks to the boy.")
            'nsubj': lambda c, t: induce_subj(c, t),
            # If obj already has a kindof match, the registration below is not executed
            # 'obj': lambda c, t: induce_measure(c, t),
            'nummod': lambda c, t:
            (4, f"extract_for('plain+number', '{c.name}')"),
            'obl:arg': lambda c, t: induce_propn(c, t),
        })
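
A note on the handler convention shared by these examples: each part handler receives the matched part c and a token/text context t, and returns a single (priority, expression) tuple, a list of such tuples, or None when it does not apply. The sketch below shows one way such a registry could store handlers per language and normalize the different return shapes; the PartsRegistry class, its method names, and the language/'*' fallback order are assumptions for illustration, not the actual sagas API.

from typing import Any, Callable, Dict, List, Tuple, Union

Result = Tuple[int, str]                      # (priority, expression) pair
HandlerReturn = Union[None, Result, List[Result]]
Handler = Callable[[Any, str], HandlerReturn]


class PartsRegistry:
    """Illustrative stand-in for extensions.register_parts plus dispatch."""

    def __init__(self):
        self._parts: Dict[str, Dict[str, Handler]] = {}

    def register_parts(self, lang: str, handlers: Dict[str, Handler]):
        self._parts.setdefault(lang, {}).update(handlers)

    def collect(self, lang: str, rel: str, c: Any, t: str) -> List[Result]:
        # Try the language-specific handler first, then the '*' wildcard.
        for key in (lang, '*'):
            handler = self._parts.get(key, {}).get(rel)
            if handler is None:
                continue
            r = handler(c, t)
            if not r:
                continue  # None or [] means the handler declined
            # Normalize a single tuple into a one-element list.
            return [r] if isinstance(r, tuple) else list(r)
        return []


registry = PartsRegistry()
registry.register_parts('*', {'nsubj': lambda c, t: (4, "extract_for('plain', 'nsubj')")})
print(registry.collect('pt', 'nsubj', None, 'A cobra fala com o menino.'))
# -> [(4, "extract_for('plain', 'nsubj')")]
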
Example #2
def get_verb_interr(c: DomainToken, part: Text):
    from sagas.nlu.inspectors_dataset import get_interrogative
    from sagas.nlu.transliterations import translits
    # Take the surface form before any '/' and romanize it for the lookup.
    word = translits.translit(c.text.split('/')[0], 'ko')
    rep = get_interrogative(word, 'ko')
    if rep:
        # Known interrogative: bind its English label to the verb root.
        return 4, f"interr_root('{rep}')"
    # Unknown word: fall back to a wildcard interrogative root.
    return 4, "interr_root('??')"


extensions.register_parts(
    'ko', {
        'nsubj':
        lambda c, t: [(4, "extract_for('plain', 'nsubj')"),
                      (2, "nsubj=agency")],
        'obl':
        lambda c, t: get_nouns_spec(c, 'obl'),
    })
extensions.register_domains(
    'ko',
    {
        # Testcases:
        # $ sko '우리는 피자와 스파게티가 필요해요.'   -> spec         ("We need pizza and spaghetti.")
        # $ sko '이번 주말에 벌써 계획이 있어요?'     -> interr_root  ("Do you already have plans this weekend?")
        'verb': lambda c, t: get_verb_spec(c, t) or get_verb_interr(c, t),
    })
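
The get_verb_spec(c, t) or get_verb_interr(c, t) idiom chains domain inducers: an inducer that finds nothing returns None (or an empty list), which is falsy, so evaluation falls through to the next one. A self-contained illustration with stand-in inducers (both function bodies below are invented for the demo):

def induce_spec_demo(c, t):
    # Pretend a "spec" reading only applies when the text contains a digit.
    if any(ch.isdigit() for ch in t):
        return 4, "extract_for('spec', 'verb')"
    return None  # falsy: fall through to the next inducer

def induce_interr_demo(c, t):
    return 4, "interr_root('??')"

def verb_domain(c, t):
    return induce_spec_demo(c, t) or induce_interr_demo(c, t)

print(verb_domain(None, 'pizza 2 slices'))  # -> (4, "extract_for('spec', 'verb')")
print(verb_domain(None, 'already plans?'))  # -> (4, "interr_root('??')")
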


class Rules_ko(LangSpecBase):
    @staticmethod
Example #3
def get_from_to(c: InferPart, t):
    """Emit an extractor for each of the カラ ("from") / マデ ("until") relations present."""
    results = []
    if 'カラ' in c.domain.rels:
        results.append((4, "extract_for('plain', 'カラ')"))
    if 'マデ' in c.domain.rels:
        results.append((4, "extract_for('plain', 'マデ')"))
    return results


# Keys are Japanese relation labels: 時間 = time; ガ/デ/ニ = case particles;
# 修飾 = modifier; カラ ("from") and マデ ("until") share get_from_to.
extensions.register_parts(
    'ja', {
        '時間': lambda c, t:
        (4, "extract_for('plain+date_search+date_parse', '時間')"),
        'ガ': lambda c, t: (4, "extract_for('plain', 'ガ')"),
        'デ': lambda c, t: (4, "extract_for('plain', 'デ')"),
        'ニ': lambda c, t: (4, "extract_for('plain', 'ニ')"),
        '修飾': lambda c, t: (4, "extract_for('plain+number', '修飾')"),
        'カラ': lambda c, t: get_from_to(c, t),
        'マデ': lambda c, t: get_from_to(c, t),
    })


def get_verb_interr(c: DomainToken, part: Text):
    return 4, "interr_root('??')"


extensions.register_domains('ja', {
    'verb': lambda c, t: get_verb_interr(c, t),
})
registry_named_exprs(
Example #4
import logging

logger = logging.getLogger(__name__)

pron = ['c_pron', 'c_det']


def induce_pron(c: InferPart, t: Text):
    # spaCy 2.x lemmatizes personal pronouns to the sentinel '-PRON-'.
    if c.lemma == '-PRON-':
        return [(4, f"extract_for('plain', '{c.name}')"),
                (2, f"{c.name}=pron")]


extensions.register_parts(
    'lt', {
        'nsubj': lambda c, t: induce_pron(c, t),
        'nmod': lambda c, t: induce_pron(c, t),
    })


class Rules_lt(LangSpecBase):
    # def opts(self):
    #     return {'engine':'spacy'}

    @staticmethod
    def prepare(meta: Dict[Text, Any]):
        tc.emp('yellow', '.. Rules_lt(Lithuanian, 立陶宛语) prepare phrase')

    def verb_rules(self):
        pat, actions_obj = (self.pat, self.actions_obj)
Example #5
import logging

from sagas.nlu.tool_base import LangToolBase

logger = logging.getLogger(__name__)

def induce_dim(c: InferPart, t: Text, dim: Text):
    # Evaluate the domain pattern with a dateins(dim) checker bound to this part.
    pat = c.domain.pattern(t)
    r = pat(**{c.name: dateins(dim)})
    logger.debug(f"t:{t}, dim:{dim}, result:{r[1]}, {r[0]}")
    if r[1]:
        # The part denotes the requested dimension: keep the binding.
        return 2, f"{c.name}=dateins('{dim}')"

extensions.register_parts('pt', {
    # $ spt 'Eu preciso disso até amanhã.'  ("I need this by tomorrow.")
    'advmod': lambda c, t: induce_dim(c, t, 'time'),
    # $ spt 'Ele está entre meu irmão e minha irmã.'  ("He is between my brother and my sister.")
    'case': lambda c, t: predict_pos(c, t, 'c_adp'),
})

class Rules_pt(LangToolBase):
    @staticmethod
    def prepare(meta: Dict[Text, Any]):
        tc.emp('yellow', '.. Rules_pt(Portuguese, 葡萄牙语) prepare phrase')

    def verb_rules(self):
        pat, actions_obj = (self.pat, self.actions_obj)

        self.collect(pats=[
            # $ se 'I want to watch a movie'
            pat(5, name='behave_willing_ev').verb(behaveof('want', 'v'),
                                                  pred_any_path('xcomp/obj', 'social_event', 'n')),
Example #6
from sagas.nlu.tool_base import LangToolBase

logger = logging.getLogger(__name__)

def head_interr(c: InferPart, part: Text):
    from sagas.nlu.inspectors_dataset import get_interrogative
    rep = get_interrogative(c.lemma, 'ru')
    if rep:
        # The head lemma is an interrogative: bind its English label.
        return 2, f"{part}=interr('{rep}')"
    # Otherwise just emit a plain extractor for this part.
    return 4, f"extract_for('plain', '{part}')"

extensions.register_parts('ru', {
    'nsubj': lambda c, t: [(4, "extract_for('plain', 'nsubj')"),
                           (2, "nsubj=agency")],
    'head_csubj': lambda c, t: head_interr(c, 'head_csubj'),
})


class Rules_ru(LangToolBase):
    @staticmethod
    def prepare(meta: Dict[Text, Any]):
        tc.emp('yellow', '.. Rules_ru(Russian, 俄语) prepare phrase')

    def verb_rules(self):
        pat, actions_obj = (self.pat, self.actions_obj)

        self.collect(pats=[
            # $ sru 'У Вас есть сигареты?'    ("Do you have cigarettes?")
            # $ sru 'У Вас есть пепельница?'  ("Do you have an ashtray?")
            pat(5, name='behave_exist').verb(extract_for('plain', 'nsubj'),
                                           behaveof('exist', 'v'), nsubj=agency),