def backward(self, grad_output):
    # Recover the input saved during forward() and delegate the gradient
    # computation to conv2d_backward.
    input = self._saved_tensor
    grad_input, self.grad_W, self.grad_b = conv2d_backward(
        input, grad_output, self.W, self.b, self.kernel_size, self.pad)
    return grad_input
def backward(self, grad_output):
    # Variant where forward() saves the already padded input, so
    # conv2d_backward works directly on the padded tensor.
    input_with_pad = self._saved_tensor
    grad_input, self.grad_W, self.grad_b = conv2d_backward(
        input_with_pad, grad_output, self.W, self.b, self.kernel_size, self.pad)
    return grad_input
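# --- Reference sketch (not the assignment's implementation) -----------------
# The two backward() variants above delegate to conv2d_backward, whose body is
# not shown in this section. The sketch below illustrates one possible NumPy
# implementation, assuming a stride-1 convolution whose forward pass zero-pads
# the raw input by `pad` on each spatial side; the variant that passes
# input_with_pad would instead skip the internal padding step.
import numpy as np

def conv2d_backward_sketch(input, grad_output, W, b, kernel_size, pad):
    # input:       (N, C_in, H, W_in)      raw, unpadded input saved in forward()
    # grad_output: (N, C_out, H_out, W_out) gradient w.r.t. the layer output
    # W:           (C_out, C_in, k, k),  b: (C_out,)
    # Returns grad_input, grad_W, grad_b with the same shapes as input, W, b.
    # (b is accepted only to mirror the conv2d_backward signature used above.)
    k = kernel_size
    _, _, H_out, W_out = grad_output.shape

    input_pad = np.pad(input, ((0, 0), (0, 0), (pad, pad), (pad, pad)))
    grad_input_pad = np.zeros_like(input_pad)
    grad_W = np.zeros_like(W)
    # Bias gradient: sum grad_output over the batch and both spatial axes.
    grad_b = grad_output.sum(axis=(0, 2, 3))

    for i in range(H_out):
        for j in range(W_out):
            patch = input_pad[:, :, i:i + k, j:j + k]    # (N, C_in, k, k)
            go = grad_output[:, :, i, j]                 # (N, C_out)
            # Weight-gradient contribution of this output position.
            grad_W += np.einsum('nchw,no->ochw', patch, go)
            # Scatter the gradient back onto the padded input window.
            grad_input_pad[:, :, i:i + k, j:j + k] += np.einsum(
                'no,ochw->nchw', go, W)

    # Strip the zero padding to get the gradient w.r.t. the raw input.
    grad_input = grad_input_pad[:, :, pad:pad + input.shape[2],
                                pad:pad + input.shape[3]]
    return grad_input, grad_W, grad_b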
        '[FAILED] conv2d_forward: bug in code, cannot run for inp.shape = (4, 3, 6, 6), w.shape = (4, 3, 2, 2), ker_size = 2, pad = 0'
    )
else:
    if test_conv_out.shape != out.shape:
        print('[ERROR] conv2d_forward: output shape is not correct')
    else:
        diff = test_conv_out - out
        if abs(diff).max() > 1e-5:
            print('[ERROR] conv2d_forward: output value is not correct')
        else:
            print('[PASS] conv2d_forward: all correct')

flag = 1
try:
    test_grad_inp, test_grad_w, test_grad_b = conv2d_backward(
        inp, grad_out, w, b, 2, 0)
except:
    print(
        '[FAILED] conv2d_backward: bug in code, cannot run for inp.shape = (4, 3, 6, 6), grad.shape = (4, 4, 7, 7), w.shape = (4, 3, 2, 2), ker_size = 2, pad = 0'
    )
    flag = 0
else:
    if test_grad_inp.shape != grad_inp.shape:
        print('[ERROR] conv2d_backward: grad_input shape is not correct')
        flag = 0
    else:
        diff = test_grad_inp - grad_inp
        if abs(diff).max() > 1e-5:
            print('[ERROR] conv2d_backward: grad_input value is not correct')
            flag = 0
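# --- Reference sketch (not the assignment's implementation) -----------------
# The tests above also exercise conv2d_forward, which is not shown in this
# section. The sketch below assumes the common stride-1, zero-padded
# convention, where the output has spatial size H + 2*pad - k + 1. Note that
# the backward test message quotes grad.shape = (4, 4, 7, 7) for a
# (4, 3, 6, 6) input with a 2x2 kernel and pad = 0, which implies a larger
# output than this convention produces, so treat the sketch purely as an
# illustration of the computation, not of the assignment's exact shapes.
import numpy as np

def conv2d_forward_sketch(input, W, b, kernel_size, pad):
    # input: (N, C_in, H, W_in), W: (C_out, C_in, k, k), b: (C_out,)
    # Returns an output of shape (N, C_out, H_out, W_out).
    N, C_in, H, W_in = input.shape
    C_out = W.shape[0]
    k = kernel_size

    input_pad = np.pad(input, ((0, 0), (0, 0), (pad, pad), (pad, pad)))
    H_out = H + 2 * pad - k + 1
    W_out = W_in + 2 * pad - k + 1
    output = np.empty((N, C_out, H_out, W_out))

    for i in range(H_out):
        for j in range(W_out):
            patch = input_pad[:, :, i:i + k, j:j + k]    # (N, C_in, k, k)
            # Cross-correlate every patch with every filter and add the bias.
            output[:, :, i, j] = np.einsum('nchw,ochw->no', patch, W) + b
    return output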