Example #1
 def backward(self, bottom, top, propagate_down):
     """Runs the backward pass."""
     top_diff = top[0].diff()
     bottom_data = bottom[0].data()
     kernel_diff = self._kernels.init_diff()
     kernel_diff_buffer = np.zeros_like(kernel_diff)
     col_diff = self._col.init_diff()
     if propagate_down:
         bottom_diff = bottom[0].init_diff()
     if self._mode != 'valid':
         pad_diff = self._padded.init_diff()
     for i in range(bottom_data.shape[0]):
         if self._mode != 'valid':
             # do padding
             pad_diff[0, self._border:-self._border,
                      self._border:-self._border] = top_diff[i]
         else:
             pad_diff = top_diff[i:i+1].view()
         # run im2col
         wrapper.im2col_forward(pad_diff, col_diff, self._ksize,
                                self._stride)
         blasdot.dot_firstdims(bottom_data[i], col_diff,
                               out=kernel_diff_buffer)
         kernel_diff += kernel_diff_buffer
         if propagate_down:
             # compute final gradient
             blasdot.dot_lastdim(col_diff, self._kernels.data().T,
                                 out=bottom_diff[i])
     # finally, add the regularization term
     if self._reg is not None:
         return self._reg.reg(self._kernels, bottom_data.shape[0])
     else:
         return 0.
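
The heavy lifting above is done by the two blasdot helpers. A minimal numpy sketch of what they appear to compute, inferred purely from how they are used in these examples (the real blasdot routines may differ in signature and in how they handle the out= argument), could look like this:

import numpy as np

def dot_firstdims_np(a, b):
    # contract a and b over every axis except the last one;
    # result has shape (a.shape[-1], b.shape[-1])
    return np.dot(a.reshape(-1, a.shape[-1]).T, b.reshape(-1, b.shape[-1]))

def dot_lastdim_np(a, b):
    # contract the last axis of a with the first axis of b;
    # result has shape a.shape[:-1] + (b.shape[-1],)
    return np.dot(a.reshape(-1, a.shape[-1]), b).reshape(a.shape[:-1] + (b.shape[-1],))

# toy shapes: a col buffer (height, width, ksize*ksize*channels) and a
# top gradient (height, width, num_output); all sizes are made up
col = np.random.randn(4, 4, 18)
top_grad = np.random.randn(4, 4, 8)
kernel_grad = dot_firstdims_np(col, top_grad)        # (18, 8), kernel-shaped
col_grad = dot_lastdim_np(top_grad, kernel_grad.T)   # (4, 4, 18), col-shaped
assert kernel_grad.shape == (18, 8)
assert col_grad.shape == (4, 4, 18)

With these definitions, the per-image loop above accumulates one kernel-shaped product per image, with im2col_forward first rearranging the gradient into the col layout.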
Example #2
 def backward(self, bottom, top, propagate_down):
     """Runs the backward pass."""
     top_diff = top[0].diff()
     padded_data = self._padded.data()
     col_data = self._col.data()
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     kernel_diff = self._kernels.init_diff()
     if self._has_bias:
         bias_diff = self._bias.init_diff()
         # bias diff is fairly easy to compute: just sum over all other
         # dimensions
         np.sum(top_diff.reshape(top_diff.size // top_diff.shape[-1],
                                 top_diff.shape[-1]),
                axis=0, out=bias_diff)
     if propagate_down:
         bottom_diff = bottom[0].init_diff(setzero=False)
         col_diff = self._col.init_diff()
         if self._pad_size == 0:
             padded_diff = self._padded.mirror_diff(bottom_diff)
         else:
             padded_diff = self._padded.init_diff(setzero=False)
     if self._large_mem:
         # we have the col_data all pre-stored, making things more efficient.
         blasdot.dot_firstdims(col_data, top_diff, out=kernel_diff)
         if propagate_down:
             blasdot.dot_lastdim(top_diff, self._kernels.data().T,
                                 out=col_diff)
             wrapper.im2col_backward(padded_diff, col_diff,
                                     self._ksize, self._stride)
     else:
         kernel_diff_buffer = np.zeros_like(kernel_diff)
         for i in range(bottom_data.shape[0]):
             # although this is the backward pass, we still need to recompute
             # the intermediate col buffer using a forward im2col call.
             wrapper.im2col_forward(padded_data[i:i+1], col_data,
                                    self._ksize, self._stride)
             blasdot.dot_firstdims(col_data, top_diff[i],
                                   out=kernel_diff_buffer)
             kernel_diff += kernel_diff_buffer
             if propagate_down:
                 blasdot.dot_lastdim(top_diff[i], self._kernels.data().T,
                                     out=col_diff)
                 # im2col backward
                 wrapper.im2col_backward(padded_diff[i:i+1], col_diff,
                                         self._ksize, self._stride)
     # if we padded the input, crop the padded gradient back into the bottom diff.
     if propagate_down:
         if self._pad_size != 0:
             bottom_diff[:] = padded_diff[:,
                                          self._pad_size:-self._pad_size,
                                          self._pad_size:-self._pad_size]
     # finally, add the regularization term
     if self._reg is not None:
         return self._reg.reg(self._kernels, bottom_data.shape[0])
     else:
         return 0.
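
Two details of this pass are easy to check in isolation: the bias gradient is a reduction of the top gradient over everything except the channel axis, and when padding is used the gradient is computed on the padded blob and then cropped back to the original spatial extent at the very end. A toy numpy check, with made-up sizes and an assumed (num, height, width, channels) layout:

import numpy as np

# assumed layout: (num, height, width, channels); sizes are made up
top_diff = np.random.randn(2, 4, 4, 3)

# bias gradient: sum over every axis except the channel axis,
# which is what the reshape + np.sum above computes
bias_diff = top_diff.reshape(-1, top_diff.shape[-1]).sum(axis=0)
assert bias_diff.shape == (3,)

# with pad_size != 0 the gradient lives on the padded blob and is
# cropped back to the un-padded spatial size, as in the final copy above
pad = 1
padded_diff = np.random.randn(2, 4 + 2 * pad, 4 + 2 * pad, 3)
bottom_diff = padded_diff[:, pad:-pad, pad:-pad]
assert bottom_diff.shape == (2, 4, 4, 3)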