Example 1
    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # Token at the end of the matched pattern (named is_token, i.e. the word "is").
        is_token = tokenized_seq[end]

        # First attempt: an open clausal complement (xcomp) verb under "is"
        # that itself has a direct-object noun; return both tokens.
        try:
            verb = get_child(is_token, xcomp, VERB)
            noun = get_child(verb, dobj, NOUN)
            return [verb, noun]
        except PatternNotFoundException:
            pass

        # Second attempt: the xcomp child itself, whether verb or noun.
        try:
            return get_child(is_token, xcomp, [VERB, NOUN])
        except PatternNotFoundException:
            pass

        # Fallback: return an ancestor noun labelled ccomp or ROOT.
        return get_ancestor(is_token, [ccomp, ROOT], NOUN)
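
The helpers get_child, get_ancestor, and PatternNotFoundException are used throughout the examples but are not shown. Below is a minimal sketch of what they might look like, assuming the dependency labels (xcomp, dobj, ccomp, ...) and POS tags (VERB, NOUN) are spaCy symbol IDs and the helpers walk the dependency tree via Token.children and Token.ancestors; the exact signatures and behaviour are assumptions, not the original implementation.

from spacy.tokens import Token


class PatternNotFoundException(Exception):
    """Raised when no token satisfies the requested dependency/POS constraints."""


def _as_list(value):
    # Both helpers accept either a single symbol or a list of symbols.
    return value if isinstance(value, (list, tuple)) else [value]


def get_child(token: Token, dep, pos) -> Token:
    # Return the first direct child of `token` whose dependency label is in
    # `dep` and whose coarse POS tag is in `pos`.
    deps, tags = _as_list(dep), _as_list(pos)
    for child in token.children:
        if child.dep in deps and child.pos in tags:
            return child
    raise PatternNotFoundException(token)


def get_ancestor(token: Token, dep, pos) -> Token:
    # Walk up the dependency tree from `token` and return the first ancestor
    # whose dependency label is in `dep` and whose coarse POS tag is in `pos`.
    deps, tags = _as_list(dep), _as_list(pos)
    for ancestor in token.ancestors:
        if ancestor.dep in deps and ancestor.pos in tags:
            return ancestor
    raise PatternNotFoundException(token)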
Example 2
    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the "qualia theorem" token, return the ancestor verb labelled ROOT or xcomp.
        qualia_theorem_token = tokenized_seq[end]
        return get_ancestor(qualia_theorem_token, [ROOT, xcomp], VERB)
Example 3
    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the "been" token, return the ancestor verb labelled ROOT or ccomp.
        been_tok = tokenized_seq[end]
        return get_ancestor(been_tok, [ROOT, ccomp], VERB)

    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the "qualia theorem" token, return the ancestor noun labelled dobj or nsubj.
        qualia_theorem_token = tokenized_seq[end]
        return get_ancestor(qualia_theorem_token, [dobj, nsubj], NOUN)

    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the "of" token, return the ancestor noun labelled nmod or ROOT.
        of_token = tokenized_seq[end]
        return get_ancestor(of_token, [nmod, ROOT], [NOUN])

    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the "other" token, return the ancestor noun labelled conj.
        other_token = tokenized_seq[end]
        return get_ancestor(other_token, conj, NOUN)

    def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
        # From the token at the start of the match, return the ancestor noun labelled ROOT.
        qualia_theorem_token = tokenized_seq[start]
        return get_ancestor(qualia_theorem_token, ROOT, NOUN)
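
None of the listings show how a handler is invoked or where the (start, end) offsets come from. Purely as an illustration, the snippet below parses a sample sentence with spaCy and replays the first branch of Example 1 directly against the Token API; the sentence, the model name, and the assumption that the trigger token is the word "is" are all hypothetical.

import spacy
from spacy.symbols import xcomp, dobj, VERB, NOUN

nlp = spacy.load("en_core_web_sm")
doc = nlp("The aim of this chapter is to prove the qualia theorem.")

# Hypothetical trigger position: the token "is", standing in for the
# offsets the pattern matcher would pass to _handle_pattern_found.
is_token = next(tok for tok in doc if tok.text == "is")

# Example 1, first branch, written directly against Token.children:
# an xcomp verb under "is", then a dobj noun under that verb.
verb = next((c for c in is_token.children if c.dep == xcomp and c.pos == VERB), None)
noun = next((c for c in verb.children if c.dep == dobj and c.pos == NOUN), None) if verb else None
print(verb, noun)  # with a typical en_core_web_sm parse: prove theorem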