Example #1
def get_attention(att_class, **kwargs):
    # Deferred imports: both modules are only needed (and set up) here.
    import OpLSTMCustom
    import RecurrentTransform
    # Build a dummy instance of the requested attention class from its name.
    recurrent_transform = RecurrentTransform.get_dummy_recurrent_transform(
        att_class.name, **kwargs)
    assert isinstance(recurrent_transform, att_class)
    # Register the transform with the custom LSTM op and return the compiled function.
    f = OpLSTMCustom.register_func(recurrent_transform)
    return f
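A minimal usage sketch, assuming the surrounding codebase exposes an attention transform class such as RecurrentTransform.AttentionDot (that class name is an assumption here, not shown in the snippet):

import RecurrentTransform

# Hypothetical attention class; substitute whatever RecurrentTransform
# subclass your codebase actually defines.
f = get_attention(RecurrentTransform.AttentionDot)
# f is now the function that OpLSTMCustom.register_func compiled and
# registered for this transform.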
Example #2
def scan(self,
         x,
         z,
         non_sequences,
         i,
         outputs_info,
         W_re,
         W_in,
         b,
         go_backwards=False,
         truncate_gradient=-1):
    assert self.parent.recurrent_transform
    # Deferred imports: OpLSTMCustom registers the custom Theano op on demand.
    import OpLSTMCustom
    import theano.tensor as T
    op = OpLSTMCustom.register_func(self.parent.recurrent_transform)
    custom_vars = self.parent.recurrent_transform.get_sorted_custom_vars()
    initial_state_vars = self.parent.recurrent_transform.get_sorted_state_vars_initial()
    # See OpLSTMCustom.LSTMCustomOp.
    # Input args are: Z, c, y0, i, W_re, custom input vars, initial state vars.
    # Results: (output) Y, (gates and cell state) H, (final cell state) d, state var sequences.
    # The slice step -(2 * go_backwards - 1) is 1 (forward) or -1 (backward).
    op_res = op(z[::-(2 * go_backwards - 1)], outputs_info[1],
                outputs_info[0], i[::-(2 * go_backwards - 1)],
                T.ones((i.shape[1],), 'float32'), W_re,
                *(custom_vars + initial_state_vars))
    # Prepend a broadcastable time axis to the final cell state d so that
    # every entry of result matches the layout expected by outputs_info.
    result = [op_res[0], op_res[2].dimshuffle('x', 0, 1)] + op_res[3:]
    assert len(result) == len(outputs_info)
    return result
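The slice step -(2 * go_backwards - 1) relies on Python booleans coercing to 0 and 1 in arithmetic: it evaluates to 1 when go_backwards is False (a no-op slice) and to -1 when it is True, which reverses the time axis of z and i. A standalone check:

for go_backwards in (False, True):
    step = -(2 * go_backwards - 1)  # bool coerces to 0 or 1
    print(go_backwards, step)
# False 1   -> z[::1], forward scan
# True  -1  -> z[::-1], time-reversed scan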