Code example #1
0
    def _step(m_, x_, h_, c_):
      """One masked LSTM time step.

      m_ is a per-sample binary mask (1 = real token, 0 = padding); where the
      mask is 0 the previous hidden/cell state is carried forward unchanged.
      x_ is the current input, h_/c_ the previous hidden and cell states.
      Returns the new (h, c) pair.
      """
      # One fused affine transform yields the pre-activations of all four gates.
      activations = tensor.dot(tensor.concatenate([x_, h_], axis=1),
                               tparams["WLSTM"]) + tparams["bLSTM"]

      in_gate = tensor.nnet.sigmoid(_lstm_slice(activations, 0, hidden_size))
      forget_gate = tensor.nnet.sigmoid(_lstm_slice(activations, 1, hidden_size))
      out_gate = tensor.nnet.sigmoid(_lstm_slice(activations, 2, hidden_size))
      candidate = tensor.tanh(_lstm_slice(activations, 3, hidden_size))

      mask = m_[:, None]  # broadcast the per-sample mask over the hidden dim

      c = forget_gate * c_ + in_gate * candidate
      c = mask * c + (1. - mask) * c_  # masked samples keep the old cell state

      h = out_gate * tensor.tanh(c)
      h = mask * h + (1. - mask) * h_  # masked samples keep the old hidden state

      return h, c
Code example #2
0
    def _step(x_, h_, c_, is_complete_, n_samples):
      """One greedy-decoding LSTM step for use inside theano.scan.

      Runs a single LSTM cell update, projects the new hidden state through
      the decoder (Wd/bd), picks the argmax word, looks up its embedding in
      Ws, and tells scan to stop once every sample in the batch has emitted
      the end token (index <= 0).

      Args (all symbolic; shapes assumed batch-major — TODO confirm):
        x_: current input word vectors.
        h_, c_: previous hidden and cell states.
        is_complete_: per-sample 0/1 completion flags from the previous step.
        n_samples: number of sequences in the batch.

      Returns:
        ((next word vectors, h, c, updated flags, argmax indices, max probs),
         scan_module.until(stop_condition))
      """
      x_and_h = tensor.concatenate([x_, h_], axis=1)
      preact = tensor.dot(x_and_h, tparams["WLSTM"]) + tparams["bLSTM"]

      i = tensor.nnet.sigmoid(_lstm_slice(preact, 0, hidden_size))
      f = tensor.nnet.sigmoid(_lstm_slice(preact, 1, hidden_size))
      o = tensor.nnet.sigmoid(_lstm_slice(preact, 2, hidden_size))
      c = tensor.tanh(_lstm_slice(preact, 3, hidden_size))

      c = f * c_ + i * c
      h = o * tensor.tanh(c)

      decoder = tensor.dot(h, tparams['Wd']) + tparams['bd']
      softmax = tensor.nnet.softmax(decoder)
      predicted_prob, predicted_idx = tensor.max_and_argmax(softmax, axis=1)
      predicted_word_vector = tparams['Ws'][predicted_idx]

      # Word index <= 0 is treated as the end-of-sequence token.
      is_end_reached = predicted_idx <= 0
      # BUG FIX: the original `is_complete_ + is_end_reached` kept incrementing
      # on every step where an already-finished sample re-predicted the end
      # token, so sum(is_complete_) could overshoot n_samples and the exact
      # tensor.eq(...) stop test would never fire, forcing scan to run to its
      # full step limit.  Clamp the flag to {0, 1} instead.
      is_complete_ = tensor.minimum(is_complete_ + is_end_reached, 1)
      is_complete_sum = tensor.sum(is_complete_)

      # tensor.ge instead of tensor.eq as a second safeguard: stop as soon as
      # the count reaches (or would ever exceed) the batch size.
      return (predicted_word_vector, h, c, is_complete_, predicted_idx, predicted_prob), scan_module.until(tensor.ge(is_complete_sum, n_samples))