Example #1
0
 def backward(self, bottom, top, propagate_down):
     """Runs the backward pass.

     Accumulates the kernel gradient over the minibatch and, when
     propagate_down is set, writes the gradient w.r.t. the bottom blob.
     Depending on self._mode, the top diff is either copied into the
     interior of a padded buffer ('full'-style modes) or used directly
     ('valid' mode) before being run through im2col.

     Args:
         bottom: list of input blobs; bottom[0].data() holds the input.
         top: list of output blobs; top[0].diff() is the incoming
             gradient from the layer above.
         propagate_down: if True, also fill bottom[0]'s diff.
     Returns:
         The regularization term from self._reg, or 0. if no
         regularizer is attached.
     """
     top_diff = top[0].diff()
     bottom_data = bottom[0].data()
     kernel_diff = self._kernels.init_diff()
     # scratch buffer: per-image kernel gradients are computed here and
     # then accumulated into kernel_diff.
     kernel_diff_buffer = np.zeros_like(kernel_diff)
     col_diff = self._col.init_diff()
     if propagate_down:
         bottom_diff = bottom[0].init_diff()
     if self._mode != 'valid':
         # non-valid modes need a padded scratch buffer for the diff.
         pad_diff = self._padded.init_diff()
     for i in range(bottom_data.shape[0]):
         if self._mode != 'valid':
             # do padding: place this image's top diff inside the border
             pad_diff[0, self._border:-self._border,
                      self._border:-self._border] = top_diff[i]
         else:
             # 'valid' mode: no border, so alias the top diff directly
             # (a view, no copy).
             pad_diff = top_diff[i:i+1].view()
         # run im2col to expand the (padded) diff into column form
         wrapper.im2col_forward(pad_diff, col_diff, self._ksize,
                                self._stride)
         # per-image kernel gradient: contract over the leading dims
         blasdot.dot_firstdims(bottom_data[i], col_diff,
                              out=kernel_diff_buffer)
         kernel_diff += kernel_diff_buffer
         if propagate_down:
             # compute final gradient w.r.t. the input of this image
             blasdot.dot_lastdim(col_diff, self._kernels.data().T,
                                 out=bottom_diff[i])
     # finally, add the regularization term
     if self._reg is not None:
         return self._reg.reg(self._kernels, bottom_data.shape[0])
     else:
         return 0.
Example #2
0
 def backward(self, bottom, top, propagate_down):
     """Runs the backward pass.

     Accumulates the kernel gradient (and the bias gradient when the
     layer has a bias) and, when propagate_down is set, computes the
     gradient with respect to the bottom blob as well.

     Args:
         bottom: list of input blobs; bottom[0].data() must be 4-dim.
         top: list of output blobs; top[0].diff() is the incoming
             gradient.
         propagate_down: when True, also fill in bottom[0]'s diff.
     Returns:
         The regularization term from self._reg (0. when no regularizer
         is attached).
     Raises:
         ValueError: if the bottom data is not a 4-dim tensor.
     """
     top_diff = top[0].diff()
     padded_data = self._padded.data()
     col_data = self._col.data()
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     kernel_diff = self._kernels.init_diff()
     if self._has_bias:
         bias_diff = self._bias.init_diff()
         # bias diff is fairly easy to compute: just sum over all other
         # dimensions. Floor division keeps the reshape size an int
         # under Python 3 (plain / would yield a float there).
         np.sum(top_diff.reshape(top_diff.size // top_diff.shape[-1],
                                 top_diff.shape[-1]),
                axis=0, out=bias_diff)
     if propagate_down:
         bottom_diff = bottom[0].init_diff(setzero=False)
         col_diff = self._col.init_diff()
         if self._pad_size == 0:
             # no padding: the padded diff simply mirrors bottom_diff
             padded_diff = self._padded.mirror_diff(bottom_diff)
         else:
             padded_diff = self._padded.init_diff(setzero=False)
     if self._large_mem:
         # we have the col_data all pre-stored, making things more efficient.
         blasdot.dot_firstdims(col_data, top_diff, out=kernel_diff)
         if propagate_down:
             blasdot.dot_lastdim(top_diff, self._kernels.data().T,
                                 out=col_diff)
             wrapper.im2col_backward(padded_diff, col_diff,
                                     self._ksize, self._stride)
     else:
         kernel_diff_buffer = np.zeros_like(kernel_diff)
         for i in range(bottom_data.shape[0]):
             # although it is a backward layer, we still need to compute
             # the intermediate results using forward calls.
             wrapper.im2col_forward(padded_data[i:i+1], col_data,
                                    self._ksize, self._stride)
             blasdot.dot_firstdims(col_data, top_diff[i],
                                   out=kernel_diff_buffer)
             kernel_diff += kernel_diff_buffer
             if propagate_down:
                 blasdot.dot_lastdim(top_diff[i], self._kernels.data().T,
                                     out=col_diff)
                 # im2col backward
                 wrapper.im2col_backward(padded_diff[i:i+1], col_diff,
                                         self._ksize, self._stride)
     # finally, copy results to the bottom diff, cropping off the pad.
     if propagate_down:
         if self._pad_size != 0:
             bottom_diff[:] = padded_diff[:,
                                          self._pad_size:-self._pad_size,
                                          self._pad_size:-self._pad_size]
     # finally, add the regularization term
     if self._reg is not None:
         return self._reg.reg(self._kernels, bottom_data.shape[0])
     else:
         return 0.
Example #3
0
 def forward(self, bottom, top):
     """Runs the forward pass.

     Pads the input if needed, expands it with im2col, and computes the
     convolution as a matrix multiplication against the kernel bank,
     plus an optional bias.

     Args:
         bottom: list of input blobs; bottom[0].data() must be 4-dim
             (num, height, width, channels).
         top: list of output blobs; top[0] receives the convolved data.
     Raises:
         ValueError: if the bottom data is not a 4-dim tensor.
     """
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     if not self._kernels.has_data():
         # lazily initialize the kernels on first call
         self._kernels.init_data(
             (self._ksize * self._ksize * bottom_data.shape[-1],
              self._num_kernels), bottom_data.dtype)
         if self._has_bias:
             self._bias.init_data((self._num_kernels, ), bottom_data.dtype)
     # pad the data
     if self._pad_size == 0:
         padded_data = self._padded.mirror(bottom_data)
     else:
         padded_data = self._padded.init_data(
             (bottom_data.shape[0], bottom_data.shape[1] +
              self._pad_size * 2, bottom_data.shape[2] + self._pad_size * 2,
              bottom_data.shape[3]), bottom_data.dtype)
         padded_data[:, self._pad_size:-self._pad_size,
                     self._pad_size:-self._pad_size] = bottom_data
     # initialize self._col: in large_mem mode, store the columns for
     # the whole minibatch; otherwise reuse a single-image buffer.
     if self._large_mem:
         col_data_num = bottom_data.shape[0]
     else:
         col_data_num = 1
     # floor division keeps the output shape integral under Python 3
     # (plain / would make the shape entries floats there).
     col_data = self._col.init_data(
         (col_data_num,
          (padded_data.shape[1] - self._ksize) // self._stride + 1,
          (padded_data.shape[2] - self._ksize) // self._stride + 1,
          padded_data.shape[3] * self._ksize * self._ksize),
         padded_data.dtype,
         setdata=False)
     # initialize top data
     top_data = top[0].init_data((bottom_data.shape[0], col_data.shape[1],
                                  col_data.shape[2], self._num_kernels),
                                 dtype=bottom_data.dtype,
                                 setdata=False)
     # process data individually
     if self._large_mem:
         wrapper.im2col_forward(padded_data, col_data, self._ksize,
                                self._stride)
         blasdot.dot_lastdim(col_data, self._kernels.data(), out=top_data)
     else:
         for i in range(bottom_data.shape[0]):
             # call im2col individually per image
             wrapper.im2col_forward(padded_data[i:i + 1], col_data,
                                    self._ksize, self._stride)
             blasdot.dot_lastdim(col_data,
                                 self._kernels.data(),
                                 out=top_data[i])
     if self._has_bias:
         top_data += self._bias.data()
     return
Example #4
0
 def forward(self, bottom, top):
     """Runs the forward pass.

     Pads the input if needed, expands it into column form with im2col,
     and evaluates the convolution as a gemm against the kernel bank,
     adding a bias at the end when the layer has one.

     Args:
         bottom: list of input blobs; bottom[0].data() must be 4-dim
             (num, height, width, channels).
         top: list of output blobs; top[0] receives the result.
     Raises:
         ValueError: if the bottom data is not a 4-dim tensor.
     """
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     if not self._kernels.has_data():
         # lazily initialize the kernels on first call
         self._kernels.init_data(
             (self._ksize * self._ksize * bottom_data.shape[-1],
              self._num_kernels),
             bottom_data.dtype)
         if self._has_bias:
             self._bias.init_data((self._num_kernels,), bottom_data.dtype)
     # pad the data
     if self._pad_size == 0:
         padded_data = self._padded.mirror(bottom_data)
     else:
         padded_data = self._padded.init_data(
                 (bottom_data.shape[0],
                  bottom_data.shape[1] + self._pad_size * 2,
                  bottom_data.shape[2] + self._pad_size * 2,
                  bottom_data.shape[3]),
                 bottom_data.dtype)
         padded_data[:, self._pad_size:-self._pad_size,
                     self._pad_size:-self._pad_size] = bottom_data
     # initialize self._col: whole-minibatch buffer in large_mem mode,
     # a single reusable image buffer otherwise.
     if self._large_mem:
         col_data_num = bottom_data.shape[0]
     else:
         col_data_num = 1
     # floor division keeps the shape entries ints under Python 3
     # (plain / would produce floats there).
     col_data = self._col.init_data(
         (col_data_num,
          (padded_data.shape[1] - self._ksize) // self._stride + 1,
          (padded_data.shape[2] - self._ksize) // self._stride + 1,
          padded_data.shape[3] * self._ksize * self._ksize),
         padded_data.dtype, setdata=False)
     # initialize top data
     top_data = top[0].init_data(
         (bottom_data.shape[0], col_data.shape[1], col_data.shape[2],
          self._num_kernels), dtype=bottom_data.dtype, setdata=False)
     # process data individually
     if self._large_mem:
         wrapper.im2col_forward(padded_data, col_data,
                                self._ksize, self._stride)
         blasdot.dot_lastdim(col_data, self._kernels.data(), out=top_data)
     else:
         for i in range(bottom_data.shape[0]):
             # call im2col individually per image
             wrapper.im2col_forward(padded_data[i:i+1], col_data,
                                    self._ksize, self._stride)
             blasdot.dot_lastdim(col_data, self._kernels.data(),
                                 out=top_data[i])
     if self._has_bias:
         top_data += self._bias.data()
     return
Example #5
0
 def forward(self, bottom, top):
     """Runs the forward pass.

     Computes the operation as a gemm followed by a col2im fold: each
     input pixel is multiplied against the kernel bank, and the
     resulting columns are folded back into the (possibly padded)
     output image, which is then cropped by the border.
     """
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     num = bottom_data.shape[0]
     height = bottom_data.shape[1]
     width = bottom_data.shape[2]
     if not self._kernels.has_data():
         # lazily set up the kernel bank on the first call
         self._kernels.init_data(
             (bottom_data.shape[-1],
              self._ksize * self._ksize * self._num_channels),
             bottom_data.dtype)
     # intermediate column buffer, one image at a time
     self._col.init_data(
         (1, height, width, self._kernels.data().shape[1]),
         dtype=bottom_data.dtype)
     # spatial extent of the fully expanded (padded) output
     pad_height = self._ksize + (height - 1) * self._stride
     pad_width = self._ksize + (width - 1) * self._stride
     if self._mode != 'valid':
         padded_data = self._padded.init_data(
             (1, pad_height, pad_width, self._num_channels),
             dtype=bottom_data.dtype)
     top_data = top[0].init_data(
         (num,
          pad_height - self._border * 2,
          pad_width - self._border * 2,
          self._num_channels),
         dtype=bottom_data.dtype)
     # process each image in the minibatch separately
     for idx in range(num):
         # first, express the convolution as a single gemm
         blasdot.dot_lastdim(bottom_data[idx:idx+1],
                             self._kernels.data(),
                             out=self._col.data())
         if self._mode == 'valid':
             # fold the columns straight into the output image
             wrapper.im2col_backward(top_data[idx:idx+1],
                                     self._col.data(),
                                     self._ksize, self._stride)
         else:
             # fold into the padded buffer, then crop away the border
             wrapper.im2col_backward(padded_data, self._col.data(),
                                     self._ksize, self._stride)
             top_data[idx] = padded_data[0,
                                         self._border:-self._border,
                                         self._border:-self._border]
     return
Example #6
0
 def backward(self, bottom, top, propagate_down):
     """Runs the backward pass.

     Computes the kernel gradient (plus the bias gradient if the layer
     has a bias) and, when propagate_down is set, the gradient with
     respect to the bottom blob.

     Args:
         bottom: list of input blobs; bottom[0].data() must be 4-dim.
         top: list of output blobs; top[0].diff() carries the gradient
             from the layer above.
         propagate_down: when True, fill in bottom[0]'s diff too.
     Returns:
         The regularization term from self._reg, or 0. when no
         regularizer is attached.
     Raises:
         ValueError: if the bottom data is not a 4-dim tensor.
     """
     top_diff = top[0].diff()
     padded_data = self._padded.data()
     col_data = self._col.data()
     bottom_data = bottom[0].data()
     if bottom_data.ndim != 4:
         raise ValueError('Bottom data should be a 4-dim tensor.')
     kernel_diff = self._kernels.init_diff()
     if self._has_bias:
         bias_diff = self._bias.init_diff()
         # bias diff is fairly easy to compute: just sum over all other
         # dimensions. Floor division keeps the reshape size an int
         # under Python 3 (plain / would yield a float there).
         np.sum(top_diff.reshape(top_diff.size // top_diff.shape[-1],
                                 top_diff.shape[-1]),
                axis=0,
                out=bias_diff)
     if propagate_down:
         bottom_diff = bottom[0].init_diff(setzero=False)
         col_diff = self._col.init_diff()
         if self._pad_size == 0:
             # no padding: the padded diff mirrors bottom_diff directly
             padded_diff = self._padded.mirror_diff(bottom_diff)
         else:
             padded_diff = self._padded.init_diff(setzero=False)
     if self._large_mem:
         # we have the col_data all pre-stored, making things more efficient.
         blasdot.dot_firstdims(col_data, top_diff, out=kernel_diff)
         if propagate_down:
             blasdot.dot_lastdim(top_diff,
                                 self._kernels.data().T,
                                 out=col_diff)
             wrapper.im2col_backward(padded_diff, col_diff, self._ksize,
                                     self._stride)
     else:
         kernel_diff_buffer = np.zeros_like(kernel_diff)
         for i in range(bottom_data.shape[0]):
             # although it is a backward layer, we still need to compute
             # the intermediate results using forward calls.
             wrapper.im2col_forward(padded_data[i:i + 1], col_data,
                                    self._ksize, self._stride)
             blasdot.dot_firstdims(col_data,
                                   top_diff[i],
                                   out=kernel_diff_buffer)
             kernel_diff += kernel_diff_buffer
             if propagate_down:
                 blasdot.dot_lastdim(top_diff[i],
                                     self._kernels.data().T,
                                     out=col_diff)
                 # im2col backward
                 wrapper.im2col_backward(padded_diff[i:i + 1], col_diff,
                                         self._ksize, self._stride)
     # finally, copy results to the bottom diff, cropping off the pad.
     if propagate_down:
         if self._pad_size != 0:
             bottom_diff[:] = padded_diff[:, self._pad_size:-self._pad_size,
                                          self._pad_size:-self._pad_size]
     # finally, add the regularization term
     if self._reg is not None:
         return self._reg.reg(self._kernels, bottom_data.shape[0])
     else:
         return 0.