Example #1
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(state, 2, axis=1)
            input_list = [inputs, hidden_tm1]
            with tf.variable_scope('Gates'):
                gates = linear(input_list,
                               self.output_size,
                               add_bias=True,
                               n_splits=2,
                               moving_params=self.moving_params)
                update_act, reset_act = gates
                update_gate = gate(update_act - self.forget_bias)
                reset_gate = gate(reset_act)
                reset_state = reset_gate * hidden_tm1
            input_list = [inputs, reset_state]
            with tf.variable_scope('Candidate'):
                hidden_act = linear(input_list,
                                    self.output_size,
                                    add_bias=True,
                                    moving_params=self.moving_params)
                hidden_tilde = self.recur_func(hidden_act)
            cell_t = update_gate * cell_tm1 + (1 - update_gate) * hidden_tilde
        return cell_t, tf.concat([cell_t, cell_t], 1)
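A minimal sketch of the state convention this cell relies on, assuming TensorFlow 1.x and that `gate` is the logistic sigmoid: the recurrent state is one tensor packing [cell; hidden] along axis 1, so each step first splits it and finally re-concatenates it (here the cell returns cell_t for both halves). Shapes below are illustrative.

import tensorflow as tf

batch_size, hidden_size = 8, 64
state = tf.zeros([batch_size, 2 * hidden_size])     # packed [cell; hidden] state
cell_tm1, hidden_tm1 = tf.split(state, 2, axis=1)   # each is (batch_size, hidden_size)
new_state = tf.concat([cell_tm1, hidden_tm1], 1)    # re-packed for the next step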
Example #2
 def linear(self, inputs, output_size, keep_prob=None, n_splits=1, add_bias=True, initializer=tf.zeros_initializer):
   """"""
   print("===nn.py: START linear", ' name=', self.name)
   #print ("nn.py,linear,keep:{}".format(keep_prob))
   if isinstance(inputs, (list, tuple)):
     n_dims = len(inputs[0].get_shape().as_list())
     inputs = tf.concat(inputs, n_dims-1)
   else:
     n_dims = len(inputs.get_shape().as_list())
   input_size = inputs.get_shape().as_list()[-1]
   
   if self.moving_params is None:
     keep_prob = keep_prob or self.mlp_keep_prob
   else:
     keep_prob = 1
   if keep_prob < 1:
     noise_shape = tf.stack([self.batch_size] + [1]*(n_dims-2) + [input_size])
     inputs = tf.nn.dropout(inputs, keep_prob, noise_shape=noise_shape)
   
   lin = linalg.linear(inputs,
                       output_size,
                       n_splits=n_splits,
                       add_bias=add_bias,
                       initializer=initializer,
                       moving_params=self.moving_params)
   
   if output_size == 1:
     if isinstance(lin, list):
       lin = [tf.squeeze(x, axis=(n_dims-1)) for x in lin]
     else:
       lin = tf.squeeze(lin, axis=(n_dims-1))
   print("===nn.py: END linear", ' name=', self.name, ' input_size=',input_size, ' output_size=', output_size, ' keep prob=',keep_prob)
   return lin
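A hedged sketch of the dropout applied above, assuming TensorFlow 1.x (where tf.nn.dropout takes keep_prob). With noise_shape set to [batch_size, 1, input_size], one Bernoulli mask is drawn per example and feature and reused across the middle (e.g. time) dimension, which is what the [1]*(n_dims-2) term in the code achieves. Shapes are illustrative.

import tensorflow as tf

batch_size, num_steps, input_size = 8, 20, 128
x = tf.random_normal([batch_size, num_steps, input_size])
noise_shape = tf.stack([batch_size, 1, input_size])   # broadcast the mask over time
y = tf.nn.dropout(x, 0.67, noise_shape=noise_shape)   # same mask at every time step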
Example #3
    def __call__(self, placeholder=None, moving_params=None):
        """"""

        print('===tagrep_vocab.py: START Call ', ' name=', self.name,
              ' embed_size=', self._embed_size)
        embeddings = super(TagRepVocab,
                           self).__call__(placeholder,
                                          moving_params=moving_params)

        #tf.summary.histogram('tagreph',embeddings)
        # (n x b x d') -> (n x b x d)
        with tf.variable_scope(self.name.title()):
            matrix = linalg.linear(embeddings,
                                   self._embed_size,
                                   moving_params=moving_params)
            if moving_params is None:
                with tf.variable_scope('Linear', reuse=True):
                    weights = tf.get_variable('Weights')
                    tf.losses.add_loss(
                        tf.nn.l2_loss(
                            tf.matmul(tf.transpose(weights), weights) -
                            tf.eye(self._embed_size)))

        print('===tagrep_vocab.py: END Call ', ' name=', self.name)
        return matrix
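A hedged sketch of the regularizer added above: tf.nn.l2_loss(W^T W - I) equals 0.5 * ||W^T W - I||_F^2, which pushes the columns of the projection matrix toward orthonormality. TensorFlow 1.x style; the input dimension (125) is illustrative.

import tensorflow as tf

embed_size = 50
W = tf.get_variable('Weights', shape=[125, embed_size])                       # projection weights
penalty = tf.nn.l2_loss(tf.matmul(tf.transpose(W), W) - tf.eye(embed_size))   # 0.5 * ||W^T W - I||_F^2
tf.losses.add_loss(penalty)                                                    # added only at training time above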
Example #4
    def __call__(self, inputs, state, scope=None):
        """"""
        print('===lstm_cell.py: Call for LSTMCell', ' name=', self.name)

        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(state, 2, axis=1)
            input_list = [inputs, hidden_tm1]
            lin = linear(input_list,
                         self.output_size,
                         add_bias=True,
                         n_splits=4,
                         moving_params=self.moving_params)
            cell_act, input_act, forget_act, output_act = lin

            cell_tilde_t = tanh(cell_act)
            input_gate = gate(input_act)
            forget_gate = gate(forget_act - self.forget_bias)
            output_gate = gate(output_act)
            cell_t = input_gate * cell_tilde_t + (1 - forget_gate) * cell_tm1
            hidden_tilde_t = self.recur_func(cell_t)
            hidden_t = hidden_tilde_t * output_gate
            print("===lstm_cell.py: END Call for LSTMCell", " hidden_t_size=",
                  hidden_t.get_shape().as_list()[-1], " state_size=",
                  tf.concat([cell_t, hidden_t], 1).get_shape())
            return hidden_t, tf.concat([cell_t, hidden_t], 1)
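A hedged sketch of what linear(..., n_splits=4) amounts to, under the assumption that it concatenates its inputs along the last axis and applies one affine map whose output is split into four equal chunks. TensorFlow 1.x style; shapes are illustrative.

import tensorflow as tf

batch_size, input_size, hidden_size = 8, 32, 64
x_t = tf.random_normal([batch_size, input_size])
h_tm1 = tf.zeros([batch_size, hidden_size])
W = tf.Variable(tf.random_normal([input_size + hidden_size, 4 * hidden_size]))
b = tf.Variable(tf.zeros([4 * hidden_size]))
acts = tf.matmul(tf.concat([x_t, h_tm1], 1), W) + b          # one big affine map
cell_act, input_act, forget_act, output_act = tf.split(acts, 4, axis=1)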
Example #5
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            inputs_list = [inputs, state]
            hidden_act = linear(inputs_list,
                                self.output_size,
                                add_bias=True,
                                moving_params=self.moving_params)
            hidden = self.recur_func(hidden_act)
        return hidden, hidden
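Under the same assumption about linear (concatenate, then one affine map), the cell above reduces to the classic Elman step h_t = f(W [x_t; h_{t-1}] + b); tanh stands in for recur_func. TensorFlow 1.x style; shapes are illustrative.

import tensorflow as tf

batch_size, input_size, hidden_size = 8, 32, 64
x_t = tf.random_normal([batch_size, input_size])
h_tm1 = tf.zeros([batch_size, hidden_size])
W = tf.Variable(tf.random_normal([input_size + hidden_size, hidden_size]))
b = tf.Variable(tf.zeros([hidden_size]))
h_t = tf.tanh(tf.matmul(tf.concat([x_t, h_tm1], 1), W) + b)  # new hidden state, also the output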
Example #6
    def __call__(self, placeholder=None, moving_params=None):
        """"""

        embeddings = super(PretrainedVocab,
                           self).__call__(placeholder,
                                          moving_params=moving_params)
        # (n x b x d') -> (n x b x d)
        with tf.variable_scope(self.name.title()):
            matrix = linalg.linear(embeddings,
                                   self.token_embed_size,
                                   moving_params=moving_params)
            if moving_params is None:
                with tf.variable_scope('Linear', reuse=True):
                    weights = tf.get_variable('Weights')
                    tf.losses.add_loss(
                        tf.nn.l2_loss(
                            tf.matmul(tf.transpose(weights), weights) -
                            tf.eye(self.token_embed_size)))
        return matrix
Example #7
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(state, 2, axis=1)
            input_list = [inputs, hidden_tm1]
            lin = linear(input_list,
                         self.output_size,
                         add_bias=True,
                         n_splits=3,
                         moving_params=self.moving_params)
            cell_act, update_act, output_act = lin

            cell_tilde_t = cell_act
            update_gate = gate(update_act - self.forget_bias)
            output_gate = gate(output_act)
            cell_t = update_gate * cell_tilde_t + (1 - update_gate) * cell_tm1
            hidden_tilde_t = self.recur_func(cell_t)
            hidden_t = hidden_tilde_t * output_gate

            return hidden_t, tf.concat([cell_t, hidden_t], 1)