Example #1
0
 def get_topic_words(self):
     """Collect topic words from the entity id and the property name.

     Both source strings are lower-cased before tokenizing; the two
     token lists are returned as one combined list.
     """
     entity_tokens = tokenize_sentence(
         self.entity_instance.get_entity_id().lower())
     property_tokens = tokenize_sentence(
         self.property_def.property_name.lower())
     return entity_tokens + property_tokens
Example #2
0
 def to_json_dict(self, is_for_training=False):
     """Serialize this object to a JSON-ready dict.

     Always includes the question, answer and context; when
     is_for_training is True, also includes the topic-word overlaps
     and the QA pairs with matching scores.
     """
     # '_question' (rather than 'question') keeps the keys in
     # alphabetic order when serialized.
     json_dict = {
         '_question': self.question,
         'answer': self.answer,
         'context': self.context_list,
     }
     if not is_for_training:
         return json_dict
     json_dict['_question_topic_words'] = intersect_lists(
         tokenize_sentence(self.question.lower()),
         self.qa_concept.get_topic_words())
     json_dict['answer_topic_words'] = intersect_lists(
         tokenize_sentence(self.answer.lower()),
         self.qa_concept.get_topic_words())
     json_dict['qa_pairs_with_matching_score'] = (
         self.qa_pairs_with_matching_score)
     return json_dict
 def to_json_dict(self, is_for_training=False):
     """Serialize this object to a JSON-ready dict.

     Training mode adds topic-word overlaps and the matching-score
     pairs. NOTE(review): unlike a sibling implementation in this
     file, question/answer are tokenized without lower-casing here —
     presumably intentional; confirm against the caller.
     """
     # '_question' (rather than 'question') keeps the keys in
     # alphabetic order when serialized.
     json_dict = {
         '_question': self.question,
         'answer': self.answer,
         'context': self.context_map,
     }
     if is_for_training:
         json_dict['_question_topic_words'] = intersect_lists(
             tokenize_sentence(self.question),
             self.qa_concept.get_topic_words())
         json_dict['answer_topic_words'] = intersect_lists(
             tokenize_sentence(self.answer),
             self.qa_concept.get_topic_words())
         json_dict['qa_pairs_with_matching_score'] = (
             self.qa_pairs_with_matching_score)
     return json_dict
Example #4
0
 def get_topic_words(self):
     """Return topic words drawn from the entity id and property name.

     Each source string is lower-cased, tokenized, and appended to the
     combined word list in that order.
     """
     sources = (
         self.entity_instance.get_entity_id().lower(),
         self.property_def.property_name.lower(),
     )
     words = []
     for text in sources:
         words += tokenize_sentence(text)
     return words
 def get_topic_words(self):
     """Topic words: the entity's 'name' property plus the relation name.

     No lower-casing is applied here — tokens keep their original case.
     """
     name_tokens = tokenize_sentence(
         self.entity_instance.property_value_map['name'])
     relation_tokens = tokenize_sentence(self.relation_def.relation_name)
     return name_tokens + relation_tokens
 def get_topic_words(self):
     """Gather topic words from the entity's name and the relation name."""
     words = []
     # Tokenize each source text in order and accumulate the tokens.
     for text in (self.entity_instance.property_value_map['name'],
                  self.relation_def.relation_name):
         words.extend(tokenize_sentence(text))
     return words