Example #1
    def __call__(self, inputs, state, timestep=0, scope=None):
        """Long short-term memory cell (LSTM)."""
        with tf.device("/gpu:" + str(self._gpu_for_layer)):
            with tf.variable_scope(scope
                                   or type(self).__name__):  # "BasicLSTMCell"
                # Parameters of gates are concatenated into one multiply for efficiency.
                c, h = state

                concat = linear([inputs, h], self._num_units * 4, False, 0.0)
                concat = layers.layer_norm(concat, reuse=None)

                # i = input_gate, j = new_input, f = forget_gate, o = output_gate
                i, j, f, o = tf.split(concat, 4, 1)

                if self.use_recurrent_dropout and self.is_training:
                    input_contribution = tf.nn.dropout(
                        tf.tanh(j), self.recurrent_dropout_factor)
                else:
                    input_contribution = tf.tanh(j)

                new_c = (c * tf.sigmoid(f + self._forget_bias) +
                         tf.sigmoid(i) * input_contribution)
                with tf.variable_scope('new_h_output'):
                    new_h = tf.tanh(layers.layer_norm(
                        new_c, reuse=None)) * tf.sigmoid(o)

            new_state = rnn_cell_impl.LSTMStateTuple(new_c, new_h)
            # return new_h, tf.concat([new_h, new_c], 1) #purposely reversed
            return new_h, new_state
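For reference, here is a minimal self-contained sketch of the same idea, normalizing the fused gate pre-activations before splitting them into i, j, f, o. The input size, unit count, scope, and variable names are assumptions made for illustration, not taken from the cell above.

import tensorflow as tf
from tensorflow.contrib import layers

num_units = 128
input_size = 64

# Hypothetical inputs: current input plus previous hidden and cell state.
x = tf.placeholder(tf.float32, [None, input_size])
h = tf.placeholder(tf.float32, [None, num_units])
c = tf.placeholder(tf.float32, [None, num_units])

with tf.variable_scope("layer_norm_lstm_sketch"):
    # One fused matmul produces all four gate pre-activations,
    # which are layer-normalized before being split.
    w = tf.get_variable("w", [input_size + num_units, 4 * num_units])
    concat = layers.layer_norm(tf.matmul(tf.concat([x, h], 1), w))

    i, j, f, o = tf.split(concat, 4, 1)
    new_c = c * tf.sigmoid(f + 1.0) + tf.sigmoid(i) * tf.tanh(j)
    new_h = tf.tanh(layers.layer_norm(new_c, scope="new_h_output")) * tf.sigmoid(o)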
Example #2
def layer_norm(in_tensor, data_format='NHWC'):
    if data_format == 'NCHW':
        in_tensor = tf.transpose(in_tensor, (0, 2, 3, 1))
    out_tensor = layers.layer_norm(in_tensor)
    if data_format == 'NCHW':
        out_tensor = tf.transpose(out_tensor, (0, 3, 1, 2))
    return out_tensor
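A short usage sketch of the wrapper above, restated under a non-shadowing name so the snippet is self-contained; the function name and the feature-map shape are assumptions.

import tensorflow as tf
from tensorflow.contrib import layers

def layer_norm_any_format(in_tensor, data_format='NHWC'):
    # Same logic as Example #2: move channels last, normalize, move them back.
    if data_format == 'NCHW':
        in_tensor = tf.transpose(in_tensor, (0, 2, 3, 1))
    out_tensor = layers.layer_norm(in_tensor)
    if data_format == 'NCHW':
        out_tensor = tf.transpose(out_tensor, (0, 3, 1, 2))
    return out_tensor

# Hypothetical channels-first feature map: [batch, channels, height, width].
feat_nchw = tf.placeholder(tf.float32, [8, 16, 32, 32])
normalized = layer_norm_any_format(feat_nchw, data_format='NCHW')  # keeps NCHW layout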
Example #3
 def _norm(self, inp, scope):
   with vs.variable_scope(scope) as scope:
     shape = inp.get_shape()[-1:]
     gamma_init = init_ops.constant_initializer(self._g)
     beta_init = init_ops.constant_initializer(self._b)
     gamma = vs.get_variable("gamma", shape=shape, initializer=gamma_init)  # pylint: disable=unused-variable
     beta = vs.get_variable("beta", shape=shape, initializer=beta_init)  # pylint: disable=unused-variable
     normalized = layers.layer_norm(inp, reuse=True, scope=scope)
     return normalized
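Example #3 above, like Examples #4, #5 and #8 through #11 below, follows the same two-step pattern: pre-create variables named gamma and beta with custom initializers inside a scope, then call layers.layer_norm with reuse=True on that scope so the op picks up those variables instead of creating its own. Below is a minimal standalone sketch of the pattern; the scope name, initial values, and input shape are assumptions.

import tensorflow as tf
from tensorflow.contrib import layers

inp = tf.placeholder(tf.float32, [None, 256])
scope = "layer_norm"
shape = inp.get_shape()[-1:]

with tf.variable_scope(scope):
    # Pre-create the variables layer_norm will look up ("gamma" and "beta")
    # so their initial values can be controlled here.
    tf.get_variable("gamma", shape=shape,
                    initializer=tf.constant_initializer(1.0))
    tf.get_variable("beta", shape=shape,
                    initializer=tf.constant_initializer(0.0))

# reuse=True makes layer_norm fetch the existing gamma/beta from `scope`
# instead of creating new ones.
normalized = layers.layer_norm(inp, reuse=True, scope=scope)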
Example #4
 def _norm(self, inp, scope):
     shape = inp.get_shape()[-1:]
     gamma_init = init_ops.constant_initializer(self._g)
     beta_init = init_ops.constant_initializer(self._b)
     with vs.variable_scope(scope):
         # Initialize beta and gamma for use by layer_norm.
         vs.get_variable("gamma", shape=shape, initializer=gamma_init)
         vs.get_variable("beta", shape=shape, initializer=beta_init)
     normalized = layers.layer_norm(inp, reuse=True, scope=scope)
     return normalized
Example #5
 def _norm(self, inp, scope):
   shape = inp.get_shape()[-1:]
   gamma_init = init_ops.constant_initializer(self._g)
   beta_init = init_ops.constant_initializer(self._b)
   with vs.variable_scope(scope):
     # Initialize beta and gamma for use by layer_norm.
     vs.get_variable("gamma", shape=shape, initializer=gamma_init)
     vs.get_variable("beta", shape=shape, initializer=beta_init)
   normalized = layers.layer_norm(inp, reuse=True, scope=scope)
   return normalized
Example #6
 def layer(x, name=None):
   with variable_scope.variable_scope(name, default_name="layer"):
     x = layers.layer_norm(x)
     x = convolutional.conv1d(
         x,
         10,
         1,
         use_bias=False,
         kernel_initializer=init_ops.constant_initializer(42.42))
     x = nn_ops.relu(x)
     return x
Example #7
 def layer(x, name=None):
     with variable_scope.variable_scope(name, default_name="layer"):
         x = layers.layer_norm(x)
         x = convolutional.conv1d(
             x,
             10,
             1,
             use_bias=False,
             kernel_initializer=init_ops.constant_initializer(42.42))
         x = nn_ops.relu(x)
         return x
Example #8
    def _norm(self, inp, scope="layer_norm"):
        """Applies layer normalization to inp using gamma and beta created in scope."""
        shape = inp.get_shape()[-1:]
        gamma_init = init_ops.constant_initializer(1)
        beta_init = init_ops.constant_initializer(1)
        with tf.variable_scope(scope):
            tf.get_variable("gamma", shape=shape, initializer=gamma_init)
            tf.get_variable("beta", shape=shape, initializer=beta_init)
        normalized = layers.layer_norm(inp, reuse=True, scope=scope)
        return normalized
Example #9
    def _norm(self, inp, scope="layer_norm"):
        """Applies layer normalization to inp using gamma and beta variables in scope."""
        shape = inp.get_shape()[-1:]
        gamma_init = init_ops.constant_initializer(self._g)
        beta_init = init_ops.constant_initializer(self._b)
        with vs.variable_scope(scope, reuse=tf.AUTO_REUSE):
            vs.get_variable("gamma", shape=shape, initializer=gamma_init)
            vs.get_variable("beta", shape=shape, initializer=beta_init)
        normalized = layers.layer_norm(inp, reuse=True, scope=scope)
        return normalized
Example #10
    def _norm(self, inp, scope="layer_norm"):
        """Performs layer normalization on the hidden state.

        inp = the input to be normalized

        Returns inp normalized by learned parameters gamma and beta
        """
        shape = inp.get_shape()[-1:]
        gamma_init = init_ops.constant_initializer(self._g)
        beta_init = init_ops.constant_initializer(self._b)
        with vs.variable_scope(scope):
            vs.get_variable("gamma", shape=shape, initializer=gamma_init)
            vs.get_variable("beta", shape=shape, initializer=beta_init)
        normalized = layers.layer_norm(inp, reuse=True, scope=scope)
        return normalized
Example #11
    def _norm(self, inp, scope="layer_norm"):
        """Performs layer normalization on the hidden state.

        inp = the input to be normalized
        scope = name for the variable scope, just leave as default

        Returns inp normalized by learned parameters gamma and beta
        """
        shape = inp.get_shape()[-1:]
        gamma_init = init_ops.constant_initializer(1)
        beta_init = init_ops.constant_initializer(1)
        with tf.variable_scope(scope):
            tf.get_variable("gamma", shape=shape, initializer=gamma_init)
            tf.get_variable("beta", shape=shape, initializer=beta_init)
        normalized = layers.layer_norm(inp, reuse=True, scope=scope)
        return normalized
Example #12
 def _norm(self, inp, scope):
     return layers.layer_norm(inp, reuse=True, scope=scope)