Example #1
File: gru_cell.py Project: sanyu12/LISA-v1
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(axis=1,
                                            num_or_size_splits=2,
                                            value=state)
            with tf.variable_scope('Gates'):
                linear = linalg.linear([inputs, hidden_tm1],
                                       self.output_size,
                                       add_bias=True,
                                       n_splits=2,
                                       moving_params=self.moving_params)
                update_act, reset_act = linear
                update_gate = linalg.sigmoid(update_act - self.forget_bias)
                reset_gate = linalg.sigmoid(reset_act)
                reset_state = reset_gate * hidden_tm1
            with tf.variable_scope('Candidate'):
                hidden_act = linalg.linear([inputs, reset_state],
                                           self.output_size,
                                           add_bias=True,
                                           moving_params=self.moving_params)
                hidden_tilde = self.recur_func(hidden_act)
            cell_t = update_gate * cell_tm1 + (1 - update_gate) * hidden_tilde
        return cell_t, tf.concat(axis=1, values=[cell_t, cell_t])
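
The gating math above is the standard GRU interpolation z * h_prev + (1 - z) * h_tilde, with the update gate shifted down by forget_bias; the quirk in this variant is that the gates read hidden_tm1 while the interpolation mixes in cell_tm1, and the returned state is cell_t concatenated with itself. A minimal NumPy sketch of the same gating math (weight names and shapes are illustrative, not the project's):

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def gru_step(x, h_prev, W_z, b_z, W_r, b_r, W_h, b_h, forget_bias=0.0):
    """One GRU step over the concatenation [x, h_prev]."""
    xh = np.concatenate([x, h_prev], axis=1)
    z = sigmoid(xh @ W_z + b_z - forget_bias)   # update gate
    r = sigmoid(xh @ W_r + b_r)                 # reset gate
    xr = np.concatenate([x, r * h_prev], axis=1)
    h_tilde = np.tanh(xr @ W_h + b_h)           # candidate state
    return z * h_prev + (1 - z) * h_tilde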
Example #2
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(axis=1,
                                            num_or_size_splits=2,
                                            value=state)
            linear = linalg.linear([inputs, hidden_tm1],
                                   self.output_size,
                                   add_bias=False,
                                   n_splits=4,
                                   moving_params=self.moving_params)
            with tf.variable_scope('Linear'):
                biases = tf.get_variable('Biases', [3 * self.output_size],
                                         initializer=tf.zeros_initializer())
            biases = tf.split(axis=0, num_or_size_splits=3, value=biases)
            cell_act, input_act, forget_act, output_act = linear
            input_bias, forget_bias, output_bias = biases

            cell_tilde_t = linalg.tanh(cell_act)
            input_gate = linalg.sigmoid(input_act + input_bias)
            forget_gate = linalg.sigmoid(forget_act + forget_bias -
                                         self.forget_bias)
            output_gate = linalg.sigmoid(output_act + output_bias)
            cell_t = input_gate * cell_tilde_t + (1 - forget_gate) * cell_tm1
            hidden_tilde_t = self.recur_func(cell_t)
            hidden_t = hidden_tilde_t * output_gate

            return hidden_t, tf.concat(axis=1, values=[cell_t, hidden_t])
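
In equation form, with \beta the forget_bias hyperparameter and \phi the recur_func nonlinearity, the update above is (note the candidate cell receives no learned bias, since only three bias vectors are created, and the previous cell is gated by 1 - f_t rather than f_t):

$$\tilde c_t = \tanh(W_c [x_t; h_{t-1}]), \quad i_t = \sigma(W_i [x_t; h_{t-1}] + b_i), \quad f_t = \sigma(W_f [x_t; h_{t-1}] + b_f - \beta)$$
$$o_t = \sigma(W_o [x_t; h_{t-1}] + b_o), \quad c_t = i_t \odot \tilde c_t + (1 - f_t) \odot c_{t-1}, \quad h_t = o_t \odot \phi(c_t)$$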
Example #3
    def __call__(self, inputs, state, scope=None):
        """"""

        if self.recur_diag_bilin:
            inputs1, inputs2 = tf.split(axis=1, num_or_size_splits=2, value=inputs)
            inputs = tf.concat(axis=1, values=[inputs1 * inputs2, inputs1, inputs2])
        with tf.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(axis=1,
                                            num_or_size_splits=2,
                                            value=state)
            linear = linalg.linear([inputs, hidden_tm1],
                                   self.output_size,
                                   add_bias=True,
                                   n_splits=3,
                                   moving_params=self.moving_params)
            cell_act, update_act, output_act = linear

            cell_tilde_t = cell_act
            update_gate = linalg.sigmoid(update_act - self.forget_bias)
            output_gate = linalg.sigmoid(output_act)
            cell_t = update_gate * cell_tilde_t + (1 - update_gate) * cell_tm1
            hidden_tilde_t = self.recur_func(cell_t)
            hidden_t = hidden_tilde_t * output_gate

            if self.hidden_include_prob < 1 and self.moving_params is None:
                hidden_mask = tf.nn.dropout(
                    tf.ones_like(hidden_t),
                    self.hidden_include_prob) * self.hidden_include_prob
                hidden_t = hidden_mask * hidden_t + (1 -
                                                     hidden_mask) * hidden_tm1
            if self.cell_include_prob < 1 and self.moving_params is None:
                cell_mask = tf.nn.dropout(
                    tf.ones_like(cell_t),
                    self.cell_include_prob) * self.cell_include_prob
                cell_t = cell_mask * cell_t + (1 - cell_mask) * cell_tm1

            return hidden_t, tf.concat(axis=1, values=[cell_t, hidden_t])
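
The two masking blocks above implement a zoneout-style regularizer: tf.nn.dropout applied to a tensor of ones yields values in {0, 1/keep_prob}, and multiplying by the include probability turns that back into a {0, 1} mask, so each unit either takes its new value or carries the previous timestep's value forward. A self-contained NumPy sketch of the masking (the function name is ours, not the project's):

import numpy as np

def zoneout(new_value, old_value, include_prob, rng=np.random):
    """Keep each unit's new value with probability include_prob;
    otherwise carry the previous timestep's value forward."""
    mask = (rng.random_sample(new_value.shape) < include_prob)
    mask = mask.astype(new_value.dtype)
    return mask * new_value + (1 - mask) * old_value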
Example #4
File: lstm_cell.py Project: kleag/LISA-v1
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.compat.v1.variable_scope(scope or type(self).__name__):
            cell_tm1, hidden_tm1 = tf.split(axis=1,
                                            num_or_size_splits=2,
                                            value=state)
            if self.recur_diag_bilin:
                inputs1, inputs2 = tf.split(axis=1,
                                            num_or_size_splits=2,
                                            value=inputs)
                input_list = [inputs1 * inputs2, inputs1 + inputs2, hidden_tm1]
            else:
                input_list = [inputs, hidden_tm1]
            linear = linalg.linear(input_list,
                                   self.output_size,
                                   add_bias=False,
                                   n_splits=4,
                                   moving_params=self.moving_params)
            with tf.compat.v1.variable_scope('Linear'):
                biases = tf.compat.v1.get_variable(
                    'Biases', [4 * self.output_size],
                    initializer=tf.zeros_initializer())
            biases = tf.split(axis=0, num_or_size_splits=4, value=biases)
            cell_act, input_act, forget_act, output_act = linear
            cell_bias, input_bias, forget_bias, output_bias = biases

            cell_tilde_t = linalg.tanh(cell_act + cell_bias)
            input_gate = linalg.sigmoid(input_act + input_bias)
            forget_gate = linalg.sigmoid(forget_act + forget_bias -
                                         self.forget_bias)
            output_gate = linalg.sigmoid(output_act + output_bias)
            cell_t = input_gate * cell_tilde_t + (1 - forget_gate) * cell_tm1
            hidden_tilde_t = self.recur_func(cell_t)
            hidden_t = hidden_tilde_t * output_gate

            if self.hidden_include_prob < 1 and self.moving_params is None:
                hidden_mask = tf.nn.dropout(
                    tf.ones_like(hidden_t),
                    self.hidden_include_prob) * self.hidden_include_prob
                hidden_t = hidden_mask * hidden_t + (1 -
                                                     hidden_mask) * hidden_tm1
            if self.cell_include_prob < 1 and self.moving_params is None:
                cell_mask = tf.nn.dropout(
                    tf.ones_like(cell_t),
                    self.cell_include_prob) * self.cell_include_prob
                cell_t = cell_mask * cell_t + (1 - cell_mask) * cell_tm1

            return hidden_t, tf.concat(axis=1, values=[cell_t, hidden_t])
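
The recur_diag_bilin branch halves the input and feeds the elementwise product alongside a linear term, so the following linear layer can realize a diagonal bilinear form in the two halves. Note that the two variants differ: example #3 concatenates [x1 * x2, x1, x2], while this one uses [x1 * x2, x1 + x2]. A sketch of this example's feature construction:

import numpy as np

def diag_bilin_features(inputs):
    """Split the input in half and build [x1 * x2, x1 + x2],
    as in the recur_diag_bilin branch above."""
    x1, x2 = np.split(inputs, 2, axis=1)
    return np.concatenate([x1 * x2, x1 + x2], axis=1)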
Example #5
    def __call__(self, inputs, state, scope=None):
        """"""

        with tf.variable_scope(scope or type(self).__name__):
            with tf.variable_scope('Gates'):
                linear = linalg.linear([inputs, state],
                                       self.output_size,
                                       add_bias=True,
                                       n_splits=2,
                                       moving_params=self.moving_params)
                update_act, reset_act = linear
                update_gate = linalg.sigmoid(update_act - self.forget_bias)
                reset_gate = linalg.sigmoid(reset_act)
                reset_state = reset_gate * state
            with tf.variable_scope('Candidate'):
                hidden_act = linalg.linear([inputs, reset_state],
                                           self.output_size,
                                           add_bias=False,
                                           moving_params=self.moving_params)
                hidden_tilde = self.recur_func(hidden_act)
            hidden = update_gate * state + (1 - update_gate) * hidden_tilde
        return hidden, hidden
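
This last example is the textbook GRU: the state is a single hidden vector, the candidate is built from the reset-gated state (here with add_bias=False), and the same tensor is returned as both output and new state. In equations, with \beta the forget_bias and \phi the recur_func:

$$z_t = \sigma(W_z [x_t; h_{t-1}] + b_z - \beta), \quad r_t = \sigma(W_r [x_t; h_{t-1}] + b_r)$$
$$\tilde h_t = \phi(W_h [x_t; r_t \odot h_{t-1}]), \quad h_t = z_t \odot h_{t-1} + (1 - z_t) \odot \tilde h_t$$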