Example #1
0
File: idea.py — Project: wuqiangch/SoftPool
 def backward(ctx, grad_output):
     """Backward pass for 1D SoftPool: compute the gradient w.r.t. the input.

     Args:
         ctx: autograd context; `ctx.saved_tensors[0]` is the forward input,
             and `ctx.kernel` / `ctx.stride` carry the pooling parameters.
         grad_output: gradient of the loss w.r.t. the forward output.

     Returns:
         Tuple of (grad_input, None, None) — gradients for (input, kernel,
         stride); the latter two are non-differentiable.
     """
     # The CUDA kernel presumably requires contiguous memory (see the
     # contiguity note in the sibling implementation); autograd does not
     # guarantee grad_output is contiguous, so normalize it first.
     if not grad_output.is_contiguous():
         grad_output = grad_output.contiguous()
     grad_input = torch.zeros_like(ctx.saved_tensors[0])
     # Argument layout expected by the extension:
     # (grad_output, *saved_tensors, kernel, stride, grad_input);
     # grad_input is filled in place by the kernel.
     saved = [grad_output] + list(
         ctx.saved_tensors) + [ctx.kernel, ctx.stride] + [grad_input]
     softpool_cuda.backward_1d(*saved)
     # Gradient underflow in the kernel can yield NaNs; zero them out.
     saved[-1][torch.isnan(saved[-1])] = 0
     return saved[-1], None, None
Example #2
0
 def backward(ctx, grad_output):
     """Backward pass for 1D SoftPool: compute the gradient w.r.t. the input.

     Args:
         ctx: autograd context; `ctx.saved_tensors[0]` is the forward input,
             and `ctx.kernel` / `ctx.stride` carry the pooling parameters.
         grad_output: gradient of the loss w.r.t. the forward output.

     Returns:
         Tuple of (grad_input, None, None) — gradients for (input, kernel,
         stride); the latter two are non-differentiable.
     """
     # Create contiguous tensor (if tensor is not contiguous).
     # BUG FIX: the original assigned the contiguous copy to an unused
     # local `x`, so the non-contiguous tensor was still handed to the
     # CUDA kernel. Rebind grad_output so the copy is actually used.
     if not grad_output.is_contiguous():
         grad_output = grad_output.contiguous()
     grad_input = torch.zeros_like(ctx.saved_tensors[0])
     # Argument layout expected by the extension:
     # (grad_output, *saved_tensors, kernel, stride, grad_input);
     # grad_input is filled in place by the kernel.
     saved = [grad_output] + list(ctx.saved_tensors) + [ctx.kernel, ctx.stride] + [grad_input]
     softpool_cuda.backward_1d(*saved)
     # Gradient underflow in the kernel can yield NaNs; zero them out.
     saved[-1][torch.isnan(saved[-1])] = 0
     return saved[-1], None, None