def run_simp(self, key, ctx: Context):
    """Return True when the token/chunk at *key* expresses simple negation.

    The language-specific negation lemma is checked against both the chunk
    text (``ctx.chunk_contains``) and the token's lemma. Languages without
    an entry in the table always yield False.

    :param key: token/chunk key within the parse context
    :param ctx: sentence analysis context (provides meta, lemmas, chunks)
    :return: bool
    """
    # Language -> negation lemmas. Table-driven (like the negative_maps
    # inspector elsewhere in this file) instead of a duplicated if/elif
    # branch per language; add a language by adding one entry here.
    neg_words = {'da': ['ikke'], 'de': ['nicht']}
    words = neg_words.get(ctx.meta['lang'])
    if words is None:
        return False
    # bool(...) keeps the original strict True/False return contract.
    return bool(ctx.chunk_contains(key, words) or ctx.lemmas[key] in words)
def run(self, key, ctx: Context):
    """Check whether *key* matches an interrogative word of category
    ``self.cat`` for the sentence's language.

    On a match the inspector records a sentence-level result via
    ``ctx.add_result``; the match flag is returned either way. Languages
    absent from ``interrogative_maps`` always yield False.
    """
    from sagas.nlu.inspectors_dataset import interrogative_maps, trans_val

    lang = ctx.meta['lang']
    if lang not in interrogative_maps:
        return False

    candidates = interrogative_maps[lang][self.cat]
    if self.is_part:
        # Part mode: compare the full word form (translated) and the chunk.
        word = ctx.get_word(key)
        translated = trans_val(word, lang)
        matched = ctx.chunk_contains(key, candidates) or translated in candidates
        if matched:
            ctx.add_result(self.name(), 'default', key,
                           {'category': self.cat, **word_values(word, lang)},
                           delivery_type='sentence')
        return matched

    # Head mode: translate the key itself and test membership directly.
    translated = trans_val(key, lang)
    logger.debug(f"*** {key} -- {translated}, {candidates}")
    matched = translated in candidates
    if matched:
        ctx.add_result(self.name(), 'default', 'head',
                       {'category': self.cat, **word_values(key, lang)},
                       delivery_type='sentence')
    return matched
def run(self, key, ctx: Context):
    """Return True when the word at *key* is a negative marker for the
    sentence's language.

    For languages listed in ``translit_langs`` the surface word is
    transliterated before lookup; otherwise the lemma is used. Languages
    absent from ``negative_maps`` always yield False.
    """
    from sagas.nlu.inspectors_dataset import negative_maps, translit_langs
    from sagas.nlu.transliterations import translits

    lang = ctx.meta['lang']
    if lang not in negative_maps:
        return False

    negatives = negative_maps[lang]
    token = (translits.translit(ctx.words[key], lang)
             if lang in translit_langs
             else ctx.lemmas[key])
    # bool(...) keeps the original strict True/False return contract.
    return bool(ctx.chunk_contains(key, negatives) or token in negatives)