def subject_network(self, x):
    '''Build the subject-scoring sub-graph of the QA model.

    Runs a bi-GRU over the query embedding, projects the final state into
    the subject-type space with a sigmoid-activated linear layer, computes a
    multi-label binary cross-entropy loss against the gold subject's type
    vector, and scores every candidate subject by dot product.

    Side effects: stores the projection layer's kernel/bias as
    ``self.type_emb_w_martix`` / ``self.type_emb_b_martix`` (sic) and the
    per-element log-likelihood tensor as ``self.loss_subject``.

    :param x: query embedding after look-up
        # assumed [batch, seq_len, emb] — consumed by NN.bi_gru; TODO confirm
    :return: (subject_score, loss_subject)
        subject_score: [batch_size, cand_sub_size]
        loss_subject: scalar ([1]) mean negative log-likelihood
    '''
    with tf.variable_scope("subject_part"):
        # Only the final bi-GRU state is used (sequence outputs discarded).
        _, query4subject = NN.bi_gru(x, self.x_lens, self.n_hidden,
                                     "bi_gru4subject_query", self.keep_prob,
                                     self.is_training)
        # Concatenate forward/backward final states along the last axis.
        query4subject = tf.concat(query4subject, -1)
        # Sigmoid projection into the type space -> [batch, type_len];
        # each element is an independent type probability.
        query4subject = tf.sigmoid(
            NN.linear(query4subject, self.qa.type_len,
                      name="query_trans_subject"))
        # Re-enter the current scope with reuse=True to grab the weights the
        # linear layer just created; NN.linear evidently names them
        # "<name>kernel" / "<name>bias" (no slash) — TODO confirm in NN.linear.
        with tf.variable_scope(tf.get_variable_scope(), reuse=True):
            self.type_emb_w_martix = tf.get_variable(
                "query_trans_subjectkernel")
            self.type_emb_b_martix = tf.get_variable(
                "query_trans_subjectbias")
        # Gold subject embedding is candidate 0 -> [batch_size, emb_size];
        # emb_size must equal type_len for the elementwise product below.
        gold_subject = self.subject_emb[:, 0, :]
        # Elementwise binary cross-entropy terms (kept positive here, negated
        # below). clip_by_value guards log(0).
        self.loss_subject = gold_subject * tf.log(
            tf.clip_by_value(
                query4subject, 1e-10, 1.0)) + (1 - gold_subject) * tf.log(
                    tf.clip_by_value(1 - query4subject, 1e-10, 1.0))
        # Sum over types, negate, then average over the batch -> scalar.
        loss_subject = tf.reduce_mean(
            -tf.reduce_sum(self.loss_subject, axis=1), axis=0)
    with tf.variable_scope("test"):
        # Batched dot product of the query's type vector with every candidate
        # subject embedding: [batch,1,type] x [batch,type,cand] -> [batch,1,cand].
        subject_score = tf.matmul(
            tf.expand_dims(query4subject, 1),
            tf.transpose(self.subject_emb, perm=[0, 2, 1]))
        # Drop the singleton middle axis -> [batch, cand_sub].
        subject_score = tf.squeeze(subject_score, 1)
        # subject_score = tf.transpose(subject_score, [0, 2, 1])
    return subject_score, loss_subject
def matmul_query_relation(self, query_vec, rel_vec, name):
    '''Score every candidate relation against the query representation.

    The query is first mapped into the relation-embedding dimension by a
    linear layer (variables scoped under ``name``), then dotted with each
    relation embedding.

    :param query_vec: query representation, [batch, d_query] (2-D assumed)
    :param rel_vec: relation embedding matrix; last axis is the embedding
        dimension — presumably [cand_rel, d_rel], verify against caller
    :param name: variable-scope name for the projection layer
    :return: score tensor, [batch, cand_rel]
    '''
    rel_dim = int(rel_vec.shape[-1])
    projected = NN.linear(query_vec, rel_dim, name=name)
    # [batch, d_rel] x [d_rel, cand_rel] -> [batch, cand_rel]
    return tf.matmul(projected, tf.transpose(rel_vec))