Example #1
 def run(self):
     # choose layer widths for the attention (g_theta) and reasoning (f_phi)
     # networks according to the configured task
     if self.config.task == 1:
         self.g_theta_layers = [2048, 2048]
         self.f_phi_layers = [2048, 2048]
     elif self.config.task == 2:
         self.g_theta_layers = [1024, 1024]
         self.f_phi_layers = [1024, 1024]
     elif self.config.task == 3:
         self.g_theta_layers = [1024, 1024, 1024]
         self.f_phi_layers = [1024] * 3
     elif self.config.task == 4:
         self.g_theta_layers = [1024, 1024]
         self.f_phi_layers = [1024, 1024]
     elif self.config.task == 5:
         self.g_theta_layers = [4096, 4096]
         self.f_phi_layers = [4096, 4096]
     else:
         log.error("Task index error")
         raise ValueError("unsupported task index: {}".format(self.config.task))
     # the attention network must end with a layer of size 1
     self.g_theta_layers.append(1)
     md = Module(self.config, self.g_theta_layers, self.f_phi_layers)
     if self.embedding == 'sum':
         embedded_c = md.contextSum(self.context, with_embed_matrix=True)
         embedded_q = md.questionSum(self.question, with_embed_matrix=True)
     elif self.embedding == 'concat':
         embedded_c = md.contextConcat(self.context, with_embed_matrix=True)
         embedded_q = md.questionConcat(self.question,
                                        with_embed_matrix=True)
     else:
         raise ValueError("unknown embedding type: {}".format(self.embedding))
     m, alphas = md.hop_1(embedded_c,
                          embedded_q,
                          phase=self.is_training,
                          activation=tf.nn.tanh)
     input_ = md.concat_with_q(m[-1], embedded_q)
     pred = md.f_phi(input_,
                     activation=tf.nn.relu,
                     reuse=False,
                     with_embed_matrix=True,
                     is_concat=True,
                     use_match=self.use_match,
                     phase=self.is_training)
     correct, accuracy, loss, sim_score, p, a = md.get_corr_acc_loss(
         pred,
         self.answer,
         self.answer_match,
         self.answer_idx,
         with_embed_matrix=True,
         is_concat=True,
         use_match=self.use_match,
         is_cosine_sim=True)
     return pred, correct, accuracy, loss, sim_score, p, a
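This run method belongs to a model class, so tensorflow (imported as tf), the project's Module class and the log object are assumed to be imported in the enclosing file. As a rough illustration of how the returned tensors might be consumed, below is a minimal TF1-style training sketch; model, num_steps and make_feed_dict are hypothetical stand-ins, not names taken from the repository.

 # Hypothetical driver for the graph built by run(); model, num_steps and
 # make_feed_dict are illustrative placeholders, not repository names.
 import tensorflow as tf

 pred, correct, accuracy, loss, sim_score, p, a = model.run()
 train_op = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(loss)

 with tf.Session() as sess:
     sess.run(tf.global_variables_initializer())
     for step in range(num_steps):
         _, step_loss, step_acc = sess.run(
             [train_op, loss, accuracy],
             feed_dict=make_feed_dict(step))  # feeds context/question/answer placeholders
         if step % 100 == 0:
             print("step %d  loss %.4f  acc %.4f" % (step, step_loss, step_acc))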
Example #2
 def run(self):
     g_theta_layers = [256, 128]  # attention component
     f_phi_layers = [512, 512]  # reasoning component
     g_theta_layers.append(1)  # attention must end with a layer of size 1
     md = Module(self.config_txt,
                 g_theta_layers,
                 f_phi_layers,
                 seed=self.seed,
                 word_embed=self.word_embed_dim,
                 hidden_dim=self.hidden_dim,
                 batch_size=self.batch_size)
     embedded_c = md.contextLSTM(self.context, self.sentence_real_len)
     embedded_q = md.questionLSTM(self.question, self.question_real_len)
     embedded_c = md.add_label(embedded_c, self.label)
     r, alphas = md.hop_2(embedded_c,
                          embedded_q,
                          phase=self.is_training,
                          activation=tf.nn.relu)
     input_ = md.concat_with_q(r[-1], embedded_q)
     prediction = md.f_phi(input_, reuse=False, phase=self.is_training)
     # split the prediction into three equal chunks along axis 1 (one per answer)
     pred = tf.split(prediction, 3, axis=1)
     correct, accuracy, loss = md.get_corr_acc_loss(pred, self.answer,
                                                    self.answer_num)
     return pred, correct, accuracy, loss
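In both examples the g_theta layer list is terminated with a size-1 layer, because the attention network has to emit a single scalar score per candidate context element (e.g. a sentence) before normalisation. The fragment below is a generic sketch of that idea, not the repository's actual Module implementation; the layer widths and variable names are illustrative only.

 # Generic sketch (not Module's code): a g_theta stack whose final size-1
 # layer yields one attention logit per (object, question) pair.
 import tensorflow as tf

 def g_theta(pair, layers=(256, 128, 1), scope="g_theta"):
     out = pair
     with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
         for i, width in enumerate(layers):
             # hidden layers use ReLU; the last, size-1 layer stays linear so its
             # logits can later be softmax-normalised across objects
             act = tf.nn.relu if i < len(layers) - 1 else None
             out = tf.layers.dense(out, width, activation=act, name="fc_%d" % i)
     return out  # shape [batch, num_objects, 1]

A softmax over the num_objects axis of these logits would then give attention weights analogous to the alphas returned by hop_1 and hop_2 above.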