def decoder_time(self) -> FeedPreviousRNN:
    """
    Build the time decoder RNN.

    The RNN consumes the time decoder input sequence and is seeded with the
    hidden representation as its initial state vector.

    Returns
    -------
    FeedPreviousRNN
        The time decoder RNN
    """
    # Collect constructor arguments first so the wiring is easy to scan.
    rnn_config = dict(
        architecture=self.t_decoder_architecture,
        inputs=self.decoder_time_inputs,
        initial_state=self.representation,
        keep_prob=self.keep_prob,
        feed_previous_prob=self.t_decoder_feed_previous_prob,
    )
    return FeedPreviousRNN(**rnn_config)
 def decoder_frequency(self) -> FeedPreviousRNN:
     """
     Build the frequency decoder RNN.

     The RNN consumes the decoder input sequence and is seeded, at each time
     step, with the time decoder output as its initial state vector.

     Returns
     -------
     FeedPreviousRNN
         The frequency decoder RNN
     """
     # Collect constructor arguments first so the wiring is easy to scan.
     rnn_config = dict(
         architecture=self.f_decoder_architecture,
         inputs=self.decoder_frequency_inputs,
         initial_state=self.decoder_frequency_initial_state,
         keep_prob=self.keep_prob,
         feed_previous_prob=self.f_decoder_feed_previous_prob,
     )
     return FeedPreviousRNN(**rnn_config)
# Example #3
    def decoder(self) -> _RNNBase:
        """
        Build the decoder RNN.

        The decoder RNN consumes the decoder input sequence and is seeded with
        the hidden representation as its initial state vector.

        For a bidirectional decoder architecture, a FeedPreviousRNN is
        deliberately not used (possibly lower model performance); a warning is
        emitted so users are aware of this behavior.

        Returns
        -------
        _RNNBase
            The decoder RNN
        """
        # Unidirectional case: the standard feed-previous decoder applies.
        if not self.decoder_architecture.bidirectional:
            return FeedPreviousRNN(
                architecture=self.decoder_architecture,
                inputs=self.decoder_inputs,
                initial_state=self.representation,
                keep_prob=self.keep_prob,
                feed_previous_prob=self.decoder_feed_previous_prob)

        self.log.warn(
            "'decoder_feed_previous_prob' set on bidirectional decoder will be ignored. If you have set "
            "--decoder-feed-prob 0, or omitted the option, the network will behave as expected and you "
            "can safely ignore this warning.")

        # Keep decoder_feed_previous_prob anchored in the computation graph.
        # Without this, TensorFlow rejects it in the input map because the
        # tensor would not appear anywhere in the graph — questionable
        # TensorFlow behavior that we simply work around here.
        graph_anchor = tf.add(self.decoder_feed_previous_prob, 1)

        with tf.control_dependencies([graph_anchor]):
            return StandardRNN(architecture=self.decoder_architecture,
                               inputs=self.decoder_inputs,
                               initial_state=self.representation,
                               keep_prob=self.keep_prob,
                               input_noise=None)