def replace_with_context(self, sessionid_in):
    """Rewrite the current question using the dialogue context.

    Pipeline: finalize the previous question, run domain classification,
    complete pronouns/slots from history, run the fuzzy-interaction check,
    and record the extracted slots on the user's session.

    Args:
        sessionid_in: session identifier used to look up the user state.

    Returns:
        Tuple of (words_list_out, dst_word_to_fufill, label2entities_cur_dict,
        flag_inter, abbrev_str, abbrev2std_list).
    """
    self._deal_last_question(sessionid_in)
    # TODO: add classification
    user = usermanager.get_user(sessionid_in)
    # TODO: add current-slot switching
    domain = self._domain_classification_class.classify(user['str_synonym'])
    mylog.info('domain_classification:\t\t{}'.format(domain))
    usermanager.update(sessionid_in, domain_classification=domain)
    words_list_out, dst_word_to_fufill, label2entities_cur_dict = (
        self.replace_with_context_only_question(sessionid_in))
    flag_inter, abbrev_str, abbrev2std_list = (
        self._fuzzy_interaction_class.deal_fuzzy_interaction(
            sessionid_in, label2entities_cur_dict, user['str_raw'],
            words_list_out))
    # NOTE(review): this assumes get_user returns the live session dict, so
    # the domain/sentence-pattern fields written earlier in this call are
    # already visible on `user` here — confirm with usermanager's contract.
    usermanager.get_user(sessionid_in).insert_slots(
        question=user['str_synonym'],
        slots_question=label2entities_cur_dict,
        domain_classification=user['domain_classification'],
        sentence_pattern_classification=user['sentence_pattern_classification'])
    # Reset the sentence-pattern recommendation flag for this user in redis.
    rds.set(user['jdpin_id'] + _flag_recomm_sent_pattern, 0)
    return (words_list_out, dst_word_to_fufill, label2entities_cur_dict,
            flag_inter, abbrev_str, abbrev2std_list)
def _response(self, input):
    """Validate the request and build the rewrite response payload.

    Args:
        input: request object carrying `question`, `jdpin` and `session`
            attributes. (NOTE(review): parameter name shadows the builtin
            `input`; kept to preserve the keyword-call interface.)

    Returns:
        dict with 'status' (1 = ok, 0 = rejected); 'data' holding the
        rewrite result plus 'str_synonym' on success; 'msg' on failure.
    """
    response = copy.deepcopy(self._data_return)
    ok, msg = self._check(input)
    if not ok:
        # Validation failed: report the reason and stop.
        response['status'] = 0
        response['msg'] = msg
        return response
    response['status'] = 1
    user = self._usermanager.create(input.jdpin, input.session)
    usermanager.update(input.session, str_raw=input.question)
    response['data'] = self._rewrite(sessionid_in=input.session)
    # NOTE(review): assumes `user` is the live session dict mutated by
    # _rewrite, so 'str_synonym' is populated by this point — confirm.
    response['data']['str_synonym'] = user['str_synonym']
    return response
def replace_with_context_only_question(self, sessionid_in):
    """Complete the current question's word list from dialogue history.

    Classifies the sentence pattern, pulls entities from the dialogue
    history, and — depending on whether the current utterance contains a
    pronoun — replaces or augments words in its segmented form.

    Args:
        sessionid_in: session identifier used to look up the user state.

    Returns:
        words_list_out: segmented word list after pronoun completion.
        dst_word_to_fufill: slot words extracted from the dialogue history.
        label2entities_cur_dict: slots extracted from the current utterance,
            ready to be stored on the session.
    """
    user = self._usermanager_class.get_user(sessionid_in)
    pattern = self._classification_sentence_pattern(sessionid_in, user['str_raw'])
    mylog.info('sentence_classification:\t\t{}\tstr_raw\t{}'.format(
        pattern, user['str_raw']))
    self._usermanager_class.update(
        sessionid_in, sentence_pattern_classification=pattern)
    src_prounoun_word, dst_word_to_fufill = (
        self._get_prounoun_and_dst_words(sessionid_in))
    # Entities of the current utterance; the timestamp companion value is
    # not used here.
    label2entities_cur_dict, _words_timestap = (
        self._prounoun_class.get_entity_from_strs(
            [user['str_synonym']], user['domain_classification']))
    words_list_out = self.replace_words_list(
        user['str_synonym_cut'], src_prounoun_word, dst_word_to_fufill)
    usermanager.update(
        sessionid_in,
        dst_word_to_fufill=dst_word_to_fufill,
        label2entities_cur_dict=label2entities_cur_dict)
    return words_list_out, dst_word_to_fufill, label2entities_cur_dict
s = '旅行险投保年龄意外险是什么两全险' # s='境外旅行能选择这款保险计划吗' # s='公司几点上班' s = '安行万里租的车保吗' s = '我要去境外旅行' s = '我想买个两全险' s = '安行万里境外险的保单信息' s='86岁能投保安行万里吗' words = jieba.lcut(s) jdpin = str(12345678) session = 'session' + jdpin pd = PronounDeal() ner = NerDeal() usermanager.create(jdpin, session) usermanager.update(session, str_raw=s) out = ner.replace_with_context(session) print(out) """ 'domain_classification':None, 'str_raw':None, 'str_synonym':None, 'str_raw_cut':None, 'str_synonym_cut':None, 'slots_history':{}, 'slots_cur':{}, 'sentence_pattern_classification':None, 'session_id':None, 'jdpin_id':None, 'login_time':None,