def call(self, inputs, mask=None):
    """Run T steps of Set2Set-style attention pooling over per-graph features.

    Args:
        inputs: pair ``(features, feature_graph_index)``.
            # assumes features is (batch, n_atoms, n_feature) and
            # feature_graph_index maps each atom to its graph id — TODO confirm
        mask: unused; kept for the Keras layer API.

    Returns:
        ``q_star`` tensor of shape (batch, n_graphs, 2 * n_hidden), the
        concatenation of the final LSTM query and the attention readout.
    """
    features, feature_graph_index = inputs
    feature_graph_index = tf.reshape(feature_graph_index, (-1,))
    # number of graphs in the batch = number of distinct segment ids
    _, _, count = tf.unique_with_counts(feature_graph_index)
    m = K.dot(features, self.m_weight)
    if self.use_bias:
        m += self.m_bias
    self.h = tf.zeros(tf.stack(
        [tf.shape(features)[0], tf.shape(count)[0], self.n_hidden]))
    self.c = tf.zeros(tf.stack(
        [tf.shape(features)[0], tf.shape(count)[0], self.n_hidden]))
    q_star = tf.zeros(tf.stack(
        [tf.shape(features)[0], tf.shape(count)[0], 2 * self.n_hidden]))
    for i in range(self.T):
        # BUG FIX: the returned cell state must be fed back into the next
        # iteration. Previously it was bound to a local `c` and discarded,
        # so the LSTM ran with a zero cell state at every step.
        self.h, self.c = self._lstm(q_star, self.c)
        # attention logit of each atom against its graph's current query
        e_i_t = tf.reduce_sum(
            m * repeat_with_index(self.h, feature_graph_index), axis=-1)
        exp = tf.exp(e_i_t)
        # segment-wise softmax denominator: sum of exp within each graph
        seg_sum = tf.transpose(
            tf.segment_sum(
                tf.transpose(exp, [1, 0]),
                feature_graph_index),
            [1, 0])
        seg_sum = tf.expand_dims(seg_sum, axis=-1)
        # normalized attention weight per atom
        a_i_t = exp / tf.squeeze(
            repeat_with_index(seg_sum, feature_graph_index))
        # attention-weighted readout per graph
        r_t = tf.transpose(tf.segment_sum(
            tf.transpose(tf.multiply(m, a_i_t[:, :, None]), [1, 0, 2]),
            feature_graph_index), [1, 0, 2])
        q_star = K.concatenate([self.h, r_t], axis=-1)
    return q_star
def test_repeat_with_index(self):
    """repeat_with_index expands axis 1 by repeating rows per the index."""
    result = self.sess.run(
        repeat_with_index(self.x, self.index, axis=1))
    # repeated axis grows to 6 entries
    self.assertListEqual(list(result.shape), [1, 6, 4])
    # the first three entries all come from the same source row
    self.assertEqual(result[0, 1, 0], result[0, 0, 0])
    self.assertEqual(result[0, 2, 0], result[0, 0, 0])
    # the fourth entry starts a different source row
    self.assertNotEqual(result[0, 3, 0], result[0, 0, 0])
    # entries four and five share that second source row
    self.assertEqual(result[0, 4, 0], result[0, 3, 0])
def phi_e(self, inputs):
    """Edge update: concatenate both endpoint features, the bond features
    and the broadcast global state, then apply the edge MLP."""
    node, weights, u, index1, index2, gnode, gbond = inputs
    index1 = tf.reshape(index1, (-1, ))
    index2 = tf.reshape(index2, (-1, ))
    # endpoint features for every bond (sender / receiver)
    atom_send = tf.gather(node, index1, axis=1)
    atom_recv = tf.gather(node, index2, axis=1)
    # broadcast the global state vector onto every bond
    u_per_bond = repeat_with_index(u, gbond, axis=1)
    # single concat along the feature axis is equivalent to the
    # original two-step concatenation
    concated = tf.concat(
        [atom_send, atom_recv, weights, u_per_bond], axis=-1)
    return self._mlp(concated, self.phi_e_weight, self.phi_e_bias)
def phi_v(self, b_ei_p, inputs):
    """Node update: combine aggregated bond messages, node features and
    the broadcast global state, then apply the node MLP."""
    node, weights, u, index1, index2, gnode, gbond = inputs
    # broadcast the global state vector onto every atom
    u_per_atom = repeat_with_index(u, gnode, axis=1)
    stacked = tf.concat([b_ei_p, node, u_per_atom], axis=-1)
    return self._mlp(stacked, self.phi_v_weight, self.phi_v_bias)