Example #1
    # Assumes a TF 1.x environment with `import tensorflow as tf`, plus a local
    # `gru_cell` module, a `random_orthonormal_initializer` function, and a
    # module-level `MAXLEN` constant defined elsewhere in the project.
    def __init__(self, config):
        self.embed_dim = config.encoder_dim
        self.config = config

        # Placeholders: a float vector of size embed_dim per example, and an
        # int64 token-id sequence of length MAXLEN.
        self.input_noise = tf.placeholder(shape=[None, self.embed_dim],
                                          dtype=tf.float32)
        self.input_sequence = tf.placeholder(shape=[None, MAXLEN],
                                             dtype=tf.int64)

        # Shared uniform initializer for the non-recurrent weights below.
        self.uniform_initializer = tf.random_uniform_initializer(
            minval=-self.config.uniform_init_scale,
            maxval=self.config.uniform_init_scale)

        # Layer-normalized GRU decoder cell; the recurrent (u) weights use a
        # random orthonormal initializer.
        self.cell = gru_cell.LayerNormGRUCell(
            self.embed_dim,
            w_initializer=tf.random_uniform_initializer(minval=-0.1,
                                                        maxval=0.1),
            u_initializer=random_orthonormal_initializer,
            b_initializer=tf.random_uniform_initializer(minval=-0.1,
                                                        maxval=0.1))

        # Word embedding table and the output softmax projection over the
        # vocabulary.
        self.word_embedding = tf.get_variable(
            'target_W',
            shape=[self.config.vocab_size, self.config.word_embedding_dim],
            initializer=self.uniform_initializer)
        self.output_W = tf.get_variable(
            'output_W',
            shape=[self.embed_dim, self.config.vocab_size],
            initializer=self.uniform_initializer)
        self.output_b = tf.get_variable('output_b',
                                        shape=[self.config.vocab_size],
                                        initializer=self.uniform_initializer)
        self.build_likelihood()
        self.build_rollout()

    def _initialize_gru_cell(self, num_units):
        """Initializes a GRU cell.

        The Variables of the GRU cell are initialized in a way that exactly
        matches the skip-thoughts paper: recurrent weights are initialized from
        random orthonormal matrices and non-recurrent weights are initialized
        from random uniform matrices.

        Args:
          num_units: Number of output units.

        Returns:
          cell: An instance of RNNCell with variable initializers that match
            the skip-thoughts paper.
        """
        return gru_cell.LayerNormGRUCell(
            num_units,
            w_initializer=self.uniform_initializer,
            u_initializer=random_orthonormal_initializer,
            b_initializer=tf.constant_initializer(0.0))
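
Both snippets pass a random_orthonormal_initializer for the recurrent (u) weights but do not show it. A minimal sketch of such an initializer, assuming the TF 1.x variable-initializer signature and the SVD-based construction used in the skip-thoughts reference code, might look like this:

def random_orthonormal_initializer(shape, dtype=tf.float32,
                                   partition_info=None):
    """Sketch (assumed, not from the snippets above): returns a random
    orthonormal matrix by taking an orthonormal factor of the SVD of a
    random normal matrix."""
    if len(shape) != 2 or shape[0] != shape[1]:
        raise ValueError("Expecting square shape, got %s" % (shape,))
    _, u, _ = tf.svd(tf.random_normal(shape, dtype=dtype), full_matrices=True)
    return u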
Example #3
    def _initialize_gru_cell(self, num_units):
        return gru_cell.LayerNormGRUCell(
            num_units,
            w_initializer=self.uniform_initializer,
            u_initializer=random_orthonormal_initializer,
            b_initializer=tf.constant_initializer(0.0))
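
For context, the returned cell is a standard RNNCell, so it can be unrolled over an embedded target sequence with tf.nn.dynamic_rnn. The lines below are a hypothetical usage sketch under TF 1.x, not part of either example; decoder_input and sequence_length are assumed tensors:

        # Hypothetical usage (TF 1.x): `decoder_input` is assumed to be a
        # [batch, time, embed_dim] float tensor and `sequence_length` a [batch]
        # int32 tensor of valid lengths.
        cell = self._initialize_gru_cell(self.config.encoder_dim)
        outputs, final_state = tf.nn.dynamic_rnn(cell=cell,
                                                 inputs=decoder_input,
                                                 sequence_length=sequence_length,
                                                 dtype=tf.float32,
                                                 scope='decoder')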