def representation(self) -> tf.Tensor:
    """Compute the hidden representation of the input sequences.

    The representation is obtained by applying a linear transformation
    with a hyperbolic tangent activation to the final state of the
    encoder, then reshaping to separate the time and batch axes and
    transposing so the batch axis comes first.

    Returns
    -------
    tf.Tensor
        The hidden representation of the input sequences, of shape
        [batch_size, max_time, encoder_state_size].
    """
    # shape: [max_time * batch_size, decoder.state_size]
    internal_rep = tf.tanh(
        linear(self.encoder.final_state,
               self.decoder_architecture.state_size))
    # NOTE(review): this reshape assumes
    # decoder_architecture.state_size == encoder_architecture.state_size
    # (the linear output size is the decoder's, the reshape uses the
    # encoder's) — confirm the two architectures use equal state sizes.
    # shape: [max_time, batch_size, encoder.state_size]
    rep = tf.reshape(internal_rep, [
        self.max_time, self.batch_size,
        self.encoder_architecture.state_size
    ])
    # shape: [batch_size, max_time, encoder.state_size]
    rep = tf.transpose(rep, perm=[1, 0, 2])
    tf.add_to_collection("representation", rep)
    summaries.variable_summaries(rep)
    # Bug fix: return the transposed representation that was registered
    # in the collection and summarized, not the flat intermediate tensor
    # that the original code returned while discarding `rep`.
    return rep
def representation(self) -> tf.Tensor:
    """Compute the hidden representation of the input sequences.

    A linear transformation followed by a hyperbolic tangent activation
    is applied to the final state of the time encoder; the output size
    of the linear layer matches the state vector size of the time
    decoder.

    Returns
    -------
    tf.Tensor
        The hidden representation of the input sequences, of shape
        [batch_size, time_decoder_state_size].
    """
    # Name the two inputs before combining them, for readability.
    final_state = self.encoder_time.final_state
    target_size = self.t_decoder_architecture.state_size
    hidden = tf.tanh(linear(input=final_state, output_size=target_size))
    # Expose the representation for retrieval elsewhere and record
    # summary statistics for TensorBoard.
    tf.add_to_collection("representation", hidden)
    summaries.variable_summaries(hidden)
    return hidden