Example #1
import tensorflow as tf
from tensorflow import nn

def test_attention():
    # AttentionCell is assumed to be defined elsewhere in the project.
    cell = nn.rnn_cell.GRUCell(512)
    # Encoder output is (batch=128, time=8, units=512); encoder_len holds
    # the true sequence length of each batch element.
    encoder_output = tf.placeholder(tf.float32, shape=(128, 8, 512))
    encoder_len = tf.placeholder(tf.int32, shape=128)
    cell = AttentionCell(cell, encoder_output, encoder_len, 512)
    state_input = cell.zero_state(128, dtype=tf.float32)
    # static_rnn takes a Python list with one (batch, features) tensor per step.
    inputs = [tf.placeholder(tf.float32, shape=(128, 512)) for _ in range(8)]
    nn.static_rnn(cell, inputs, state_input)
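To see the surrounding plumbing, here is a minimal self-contained sketch of the same wiring with a plain GRUCell in place of the attention wrapper (so it runs without the project's AttentionCell), executed in a TF 1.x session; all sizes mirror the test above:

import numpy as np
import tensorflow as tf

def run_static_gru():
    cell = tf.nn.rnn_cell.GRUCell(512)
    state_input = cell.zero_state(128, dtype=tf.float32)
    inputs = [tf.placeholder(tf.float32, shape=(128, 512)) for _ in range(8)]
    outputs, final_state = tf.nn.static_rnn(cell, inputs, state_input)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        feed = {ph: np.zeros((128, 512), np.float32) for ph in inputs}
        # outputs is a list of 8 (128, 512) tensors, one per time step.
        return sess.run(outputs[-1], feed_dict=feed)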
Example #2
def forward(self, x, computation_mode=MakiRestorable.INFERENCE_MODE):
    # Method of an RNN layer; assumes self._cell is an LSTM-style cell whose
    # final state is an LSTMStateTuple (c, h), and that dynamic_rnn/static_rnn
    # were imported bare from tf.nn.
    if self._dynamic:
        # hidden states, (last cell state, last hidden state)
        hs, (c_last, h_last) = dynamic_rnn(self._cell, x, dtype=tf.float32)
        return hs, c_last, h_last
    else:
        # static_rnn wants a Python list of per-step (batch, features) tensors.
        unstack_x = tf.unstack(x, axis=1)
        hs_list, (c_last, h_last) = static_rnn(self._cell, unstack_x, dtype=tf.float32)
        # Re-stack per-step outputs so both branches return (batch, time, units).
        hs = tf.stack(hs_list, axis=1)
        return hs, c_last, h_last
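A standalone sketch of the two branches' shape contract, with illustrative sizes (the Maki class context is omitted); both paths end up with a (batch, time, units) output plus the final (c, h) pair:

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=(32, 10, 64))  # (batch, time, features)

# Dynamic branch: one call on the full tensor.
cell_d = tf.nn.rnn_cell.LSTMCell(128)
hs, (c_last, h_last) = tf.nn.dynamic_rnn(cell_d, x, dtype=tf.float32, scope="dyn")
# hs: (32, 10, 128); c_last and h_last: (32, 128)

# Static branch: unstack time into a list of 10 (32, 64) tensors.
cell_s = tf.nn.rnn_cell.LSTMCell(128)
steps = tf.unstack(x, axis=1)
hs_list, (c_st, h_st) = tf.nn.static_rnn(cell_s, steps, dtype=tf.float32, scope="sta")
hs_st = tf.stack(hs_list, axis=1)  # back to (32, 10, 128)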
Example #3
def rnn(cell, inputs, initial_state=None, dtype=None,
        sequence_length=None, scope=None, bidi=False):
    """Create an encoder RNN with the given cell type
    (allows left-to-right or bidirectional encoders)."""
    if bidi:
        # The same cell and initial state serve both directions.
        outputs, state_fw, state_bw = static_bidirectional_rnn(
            cell, cell, inputs, initial_state, initial_state, dtype, sequence_length, scope)
        if isinstance(state_fw, tuple):  # add up LSTM states part-by-part (c and h)
            return outputs, (state_fw[0] + state_bw[0], state_fw[1] + state_bw[1])
        return outputs, state_fw + state_bw
    else:
        return static_rnn(cell, inputs, initial_state, dtype, sequence_length, scope)
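A sketch of calling this helper in bidirectional mode, assuming the module-level imports `rnn` relies on (bare static_rnn / static_bidirectional_rnn from TF 1.x) and illustrative sizes:

import tensorflow as tf

cell = tf.nn.rnn_cell.LSTMCell(64)
# Static RNNs consume a list of per-step (batch, features) tensors.
inputs = [tf.placeholder(tf.float32, shape=(16, 32)) for _ in range(5)]

outputs, state = rnn(cell, inputs, dtype=tf.float32, bidi=True)
# Each output step is (16, 128): forward and backward outputs concatenated.
# `state` is the element-wise sum of the two directions' (c, h) LSTM states,
# so each part keeps the decoder-friendly size of (16, 64).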
Example #4
def lstm(X):
    # Assumes `import tensorflow as tf`, `from tensorflow import nn`,
    # `from tensorflow.contrib import rnn`, and a module-level INPUT_DIM.
    n_hidden = 512
    n_layers = 2
    # Two stacked LSTM layers wrapped into one multi-layer cell.
    cell = rnn.MultiRNNCell(
        [rnn.BasicLSTMCell(n_hidden) for _ in range(n_layers)])
    # X is time-major; unstack axis 0 into a list of per-step tensors.
    outputs, _ = nn.static_rnn(cell, tf.unstack(X, axis=0), dtype=tf.float32)
    # Project the last step's output back to the input dimensionality.
    W = tf.get_variable('W',
                        initializer=tf.random_normal([n_hidden, INPUT_DIM]),
                        dtype=tf.float32)
    b = tf.get_variable('b', initializer=tf.zeros([INPUT_DIM]), dtype=tf.float32)
    pred = tf.matmul(outputs[-1], W) + b

    return pred
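A sketch of wiring `lstm` into a graph and running one forward pass, assuming a time-major placeholder; INPUT_DIM, SEQ_LEN, and BATCH here are illustrative values (the source defines INPUT_DIM elsewhere):

import numpy as np
import tensorflow as tf

INPUT_DIM = 64
SEQ_LEN, BATCH = 20, 8

X = tf.placeholder(tf.float32, shape=(SEQ_LEN, BATCH, INPUT_DIM))
pred = lstm(X)  # (BATCH, INPUT_DIM)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    out = sess.run(pred, {X: np.zeros((SEQ_LEN, BATCH, INPUT_DIM), np.float32)})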
Example #5
def forward(self, X, is_training=False):
    # Dispatches over the four TF 1.x RNN variants; note that the same cell
    # object is passed for both directions of the bidirectional branches.
    if self.cell_type == CellType.Bidir_Dynamic:
        return bidirectional_dynamic_rnn(cell_fw=self.cells,
                                         cell_bw=self.cells,
                                         inputs=X,
                                         dtype=tf.float32)
    elif self.cell_type == CellType.Bidir_Static:
        # Static variants need a list of per-step tensors.
        X = tf.unstack(X, num=self.seq_length, axis=1)
        return static_bidirectional_rnn(cell_fw=self.cells,
                                        cell_bw=self.cells,
                                        inputs=X,
                                        dtype=tf.float32)
    elif self.cell_type == CellType.Dynamic:
        return dynamic_rnn(self.cells, X, dtype=tf.float32)
    elif self.cell_type == CellType.Static:
        X = tf.unstack(X, num=self.seq_length, axis=1)
        return static_rnn(self.cells, X, dtype=tf.float32)
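The four branches return structurally different values, which callers must unpack accordingly. A minimal standalone sketch of the raw TF 1.x return shapes, with illustrative sizes and one fresh GRU cell per direction (explicit scopes keep variable names from colliding):

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=(32, 10, 64))  # (batch, time, features)
steps = tf.unstack(x, num=10, axis=1)               # list of 10 (32, 64) tensors

def make_cell():
    return tf.nn.rnn_cell.GRUCell(128)

# Dynamic: (outputs, state); outputs is (32, 10, 128).
o_d, s_d = tf.nn.dynamic_rnn(make_cell(), x, dtype=tf.float32, scope="d")
# Static: (outputs, state); outputs is a list of 10 (32, 128) tensors.
o_s, s_s = tf.nn.static_rnn(make_cell(), steps, dtype=tf.float32, scope="s")
# Bidir dynamic: ((out_fw, out_bw), (state_fw, state_bw)); each out_* is (32, 10, 128).
(o_f, o_b), (s_f, s_b) = tf.nn.bidirectional_dynamic_rnn(
    make_cell(), make_cell(), x, dtype=tf.float32, scope="bd")
# Bidir static: (outputs, state_fw, state_bw); outputs is a list of 10 (32, 256) tensors.
o_bs, s_f2, s_b2 = tf.nn.static_bidirectional_rnn(
    make_cell(), make_cell(), steps, dtype=tf.float32, scope="bs")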
Example #6
def forward(self, x, computation_mode=MakiRestorable.INFERENCE_MODE):
    if self._cell_type == CellType.BIDIR_DYNAMIC:
        (outputs_f, outputs_b), (states_f, states_b) = \
            bidirectional_dynamic_rnn(cell_fw=self._cells, cell_bw=self._cells,
                                      inputs=x, dtype=tf.float32)
        # Creating two separate MakiTensors for `outputs_f` and `outputs_b` is not
        # an option: the algorithm that builds the computational graph does not
        # handle that case and would raise an error, so the two directions are
        # concatenated instead.
        self._cells_state = tf.concat([states_f, states_b], axis=-1)
        return tf.concat([outputs_f, outputs_b], axis=-1)
    elif self._cell_type == CellType.BIDIR_STATIC:
        # Static variants need a list of per-step tensors.
        x = tf.unstack(x, num=self._seq_length, axis=1)
        outputs_fb, states_f, states_b = \
            static_bidirectional_rnn(cell_fw=self._cells, cell_bw=self._cells,
                                     inputs=x, dtype=tf.float32)
        # Concatenate the forward and backward final states.
        self._cells_state = tf.concat([states_f, states_b], axis=-1)
        return outputs_fb
    elif self._cell_type == CellType.DYNAMIC:
        outputs, states = dynamic_rnn(self._cells, x, dtype=tf.float32)
        self._cells_state = states
        return outputs
    elif self._cell_type == CellType.STATIC:
        x = tf.unstack(x, num=self._seq_length, axis=1)
        outputs, states = static_rnn(self._cells, x, dtype=tf.float32)
        self._cells_state = states
        # static_rnn returns per-step outputs; stack them back to
        # (batch, time, units) to match the dynamic branch.
        return tf.stack(outputs, axis=1)