def backward(self, top, propagate_down, bottom):
    """Backward pass: compute the bottom gradient with the compiled Theano fn.

    Parameters
    ----------
    top : list of caffe blobs
        top[0].diff is the incoming gradient from above.
    propagate_down : list of bool
        propagate_down[0] tells whether bottom[0] needs a gradient.
    bottom : list of caffe blobs
        bottom[0].diff is overwritten in place with the computed gradient.
    """
    # Fix: test propagate_down BEFORE the (lazy) import so the early-return
    # path does no work — this also matches the sibling backward() that
    # guards first.
    if not propagate_down[0]:
        return
    from caffe_helper.theano_util import blob_to_CudaNdArray
    # Zero-copy views over the blob's GPU data/diff memory.
    b, bdiff = blob_to_CudaNdArray(bottom[0])
    _, tdiff = blob_to_CudaNdArray(top[0])
    # self.f_backward is a compiled Theano function: (data, top_diff) -> diff.
    bdiff[...] = self.f_backward(b, tdiff)
def forward(self, bottom, top):
    """Forward pass: flatten both inputs to 2-D and run the Theano function.

    bottom[0] and bottom[1] (presumably predictions and targets — confirm
    against the layer setup) are reshaped to (batch, features) before being
    fed to self.tn_forward; the result is written into top[0]'s data.
    """
    from caffe_helper.theano_util import blob_to_CudaNdArray
    pred, _ = blob_to_CudaNdArray(bottom[0])
    target, _ = blob_to_CudaNdArray(bottom[1])
    out, _ = blob_to_CudaNdArray(top[0])
    # Collapse every trailing axis into one feature dimension.
    flat = (pred.shape[0], int(np.prod(pred.shape[1:])))
    out[...] = self.tn_forward(pred.reshape(flat), target.reshape(flat))
def backward(self, top, propagate_down, bottom):
    """Backward pass: gradient of the loss w.r.t. bottom[0] via Theano.

    Reshapes data and targets to (batch, features), evaluates the compiled
    gradient function self.tn_backward with the top gradient, and writes the
    result (restored to the blob's original shape) into bottom[0].diff.

    NOTE(review): propagate_down is never consulted here — the gradient is
    computed unconditionally; confirm that is intended.
    """
    from caffe_helper.theano_util import blob_to_CudaNdArray
    y, dy = blob_to_CudaNdArray(bottom[0])
    t, _ = blob_to_CudaNdArray(bottom[1])
    _, dl = blob_to_CudaNdArray(top[0])
    two_d = (y.shape[0], int(np.prod(y.shape[1:])))
    grad = self.tn_backward(y.reshape(two_d), t.reshape(two_d), dl)
    dy[...] = grad.reshape(dy.shape)
def backward(self, top, propagate_down, bottom):
    """Backward pass: fill bottom[0].diff from the Theano gradient function.

    All trailing axes of bottom[0]/bottom[1] are folded into a single
    feature axis before calling self.tn_backward; the returned gradient is
    reshaped back to the blob's native shape.

    NOTE(review): propagate_down is ignored — verify the layer is always
    expected to backpropagate to bottom[0].
    """
    from caffe_helper.theano_util import blob_to_CudaNdArray
    data, data_diff = blob_to_CudaNdArray(bottom[0])
    labels, _ = blob_to_CudaNdArray(bottom[1])
    _, top_diff = blob_to_CudaNdArray(top[0])
    batch = data.shape[0]
    features = int(np.prod(data.shape[1:]))
    flat_grad = self.tn_backward(
        data.reshape((batch, features)),
        labels.reshape((batch, features)),
        top_diff)
    data_diff[...] = flat_grad.reshape(data_diff.shape)
def backward(self, top, propagate_down, bottom):
    """Backward pass for a two-input loss-style layer.

    Gradient flows only into bottom[0]; the layer cannot backpropagate to
    bottom[1] (presumably the targets — confirm against layer setup), which
    the assert enforces. Does nothing when bottom[0] needs no gradient.
    """
    if not propagate_down[0]:
        return
    assert not propagate_down[1]
    from caffe_helper.theano_util import blob_to_CudaNdArray
    pred, pred_diff = blob_to_CudaNdArray(bottom[0])
    target, _ = blob_to_CudaNdArray(bottom[1])
    _, top_diff = blob_to_CudaNdArray(top[0])
    # self.f_backward: (predictions, targets, top_diff) -> bottom gradient.
    pred_diff[...] = self.f_backward(pred, target, top_diff)
def forward(self, bottom, top):
    """Forward pass: evaluate the compiled Theano function on bottom[0].

    The result is written in place into top[0]'s data buffer.
    """
    from caffe_helper.theano_util import blob_to_CudaNdArray
    in_data, _ = blob_to_CudaNdArray(bottom[0])
    out_data, _ = blob_to_CudaNdArray(top[0])
    out_data[...] = self.f_forward(in_data)