Example #1
def test_multi_convolutional_feature_map_fprop():
    # Standalone modules that will mirror the MCFM's internals.
    cplane1 = ConvolutionalPlane((5, 5), (20, 20), bias=False)
    cplane2 = ConvolutionalPlane((5, 5), (20, 20), bias=False)
    sigmoid = TanhSigmoid((16, 16), bias=True)
    mfmap = MultiConvolutionalFeatureMap((5, 5), (20, 20), 2)
    mfmap.initialize()
    # Copy the MCFM's per-plane filters and its bias into the standalone modules.
    cplane1.params[:] = mfmap.planes[0].params
    cplane2.params[:] = mfmap.planes[1].params
    sigmoid.params[:] = mfmap.params[0:1]
    inputs1 = random.normal(size=(20, 20))
    inputs2 = random.normal(size=(20, 20))
    # The MCFM's fprop should match the manual composition of the pieces.
    control = sigmoid.fprop(cplane1.fprop(inputs1) + cplane2.fprop(inputs2))
    mfmap_out = mfmap.fprop([inputs1, inputs2])
    assert_array_almost_equal(control, mfmap_out)
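For intuition, here is a minimal standalone sketch of what the control computation above amounts to, assuming each ConvolutionalPlane performs a 'valid'-mode 2D correlation with a single filter and TanhSigmoid adds its bias and applies a tanh-style squashing (the exact correlation convention and any tanh scaling constants are assumptions, and the filters here are made up):

import numpy as np
from scipy.signal import correlate2d

inputs1 = np.random.normal(size=(20, 20))
inputs2 = np.random.normal(size=(20, 20))
filt1 = np.random.normal(size=(5, 5))      # stand-ins for the two plane filters
filt2 = np.random.normal(size=(5, 5))
bias = np.random.normal()                  # stand-in for the shared bias

# Each plane maps a (20, 20) image to (20 - 5 + 1) = 16 per dimension.
summed = (correlate2d(inputs1, filt1, mode='valid') +
          correlate2d(inputs2, filt2, mode='valid'))
output = np.tanh(summed + bias)            # squashing; scaling constants omitted
print(output.shape)                        # (16, 16)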
Example #2
 def __init__(self,
              fsize,
              imsize,
              num,
              inner=TANH_INNER,
              outer=TANH_OUTER,
              **kwargs):
     """
     Construct a MultiConvolutionalFeatureMap.
     
     All parameters are as in ConvolutionalFeatureMap; num is the
     number of planes this MCFM has (and the length of the list
     that fprop, bprop and grad expect for the "inputs" argument).
     """
     # Filter size times the number of filters, plus a bias
     filter_elems = np.prod(fsize)
     nparams = filter_elems * num + 1
     outsize = ConvolutionalPlane.outsize_from_imsize_and_fsize(
         imsize, fsize)
     super(MultiConvolutionalFeatureMap, self).__init__(outsize,
                                                        True,
                                                        nparams=nparams,
                                                        **kwargs)
     self.planes = []
     assert num > 0
     for index in xrange(num):
         param_range = slice(1 + (filter_elems * index),
                             1 + (filter_elems * (index + 1)))
         thisparam = self.params[param_range]
         thisgrad = self._grad[param_range]
         thisplane = ConvolutionalPlane(fsize,
                                        imsize,
                                        params=thisparam,
                                        grad=thisgrad,
                                        bias=False)
         self.planes.append(thisplane)
     self._out_array = np.empty(outsize)
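Concretely, with the fsize=(5, 5) and num=2 used in the test above, this constructor lays out the parameter vector as one shared bias followed by two contiguous 25-element filter blocks; the slicing arithmetic can be checked on its own (the names below are local to this sketch):

import numpy as np

fsize, num = (5, 5), 2
filter_elems = np.prod(fsize)              # 25 weights per filter
nparams = filter_elems * num + 1           # 51: shared bias plus two filters
# params[0] is the bias; each plane then owns a contiguous slice of the rest.
for index in range(num):
    param_range = slice(1 + filter_elems * index,
                        1 + filter_elems * (index + 1))
    print(index, param_range.start, param_range.stop)   # 0 1 26, then 1 26 51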
Example #3
 def __init__(self, fsize, imsize, num, inner=TANH_INNER, outer=TANH_OUTER,
             **kwargs):
     """
     Construct a MultiConvolutionalFeatureMap.
     
     All parameters are as in ConvolutionalFeatureMap; num is the
     number of planes this MCFM has (and the length of the list
     that fprop, bprop and grad expect for the "inputs" argument).
     """
     # Filter size times the number of filters, plus a bias
     filter_elems = np.prod(fsize)
     nparams = filter_elems * num + 1
     outsize = ConvolutionalPlane.outsize_from_imsize_and_fsize(
         imsize,
         fsize
     )
     super(MultiConvolutionalFeatureMap, self).__init__(
         outsize,
         True,
         nparams=nparams,
         **kwargs
     )
     self.planes = []
     assert num > 0
     for index in xrange(num):
         param_range = slice(1 + (filter_elems * index), 
                             1 + (filter_elems * (index + 1)))
         thisparam = self.params[param_range]
         thisgrad = self._grad[param_range]
         thisplane = ConvolutionalPlane(fsize, imsize, 
             params=thisparam,
             grad=thisgrad,
             bias=False
         )
         self.planes.append(thisplane)
     self._out_array = np.empty(outsize)
Example #4
def test_convolutional_plane_params_gradient_no_bias():
    module = ConvolutionalPlane((5, 5), (20, 20), bias=False)
    module.initialize()
    inputs = random.normal(size=(20, 20))
    params = random.normal(size=len(module.params))
    check_parameter_gradient(module, inputs, params)
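check_parameter_gradient is not shown in these examples; a minimal sketch of the central finite-difference check it presumably performs looks like the following (the scalar objective, tolerance, and the module interface used here are assumptions):

import numpy as np

def finite_difference_check(module, inputs, params, eps=1e-6, tol=1e-4):
    """Compare module.grad against central differences of a scalar objective."""
    def objective(p):
        module.params[:] = p
        return module.fprop(inputs).sum()   # scalar function of the module output

    # Analytical gradient: for the sum objective, dout is an array of ones.
    module.params[:] = params
    analytic = np.array(module.grad(np.ones(module.outsize), inputs))

    numeric = np.empty(len(params))
    for i in range(len(params)):
        p_hi, p_lo = params.copy(), params.copy()
        p_hi[i] += eps
        p_lo[i] -= eps
        numeric[i] = (objective(p_hi) - objective(p_lo)) / (2 * eps)
    assert np.allclose(analytic, numeric, atol=tol)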
Example #5
 def __init__(self, fsize, imsize):
     """Construct a feature map with given filter size and image size."""
     super(NaiveConvolutionalFeatureMap, self).__init__()
     self.convolution = ConvolutionalPlane(fsize, imsize)
     self.nonlinearity = TanhSigmoid(self.convolution.outsize)
Example #6
class NaiveConvolutionalFeatureMap(BaseBPropComponent):
    """
    One way to implement a standard feature map that takes input from a 
    single lower-level image. This serves two purposes: to demonstrate 
    how to write new learning modules by composing two existing modules,
    and to serve as a sanity check for the more efficient implementation,
    ConvolutionalFeatureMap.
    
    Has, as members, a ConvolutionalPlane with standard bias configuration
    and a TanhSigmoid object that does the squashing.
    
    This is a little wasteful since each of the modules has separate output
    array members. See FeatureMap for a slightly more memory efficient 
    implementation that uses subclassing.
    """
    def __init__(self, fsize, imsize):
        """Construct a feature map with given filter size and image size."""
        super(NaiveConvolutionalFeatureMap, self).__init__()
        self.convolution = ConvolutionalPlane(fsize, imsize)
        self.nonlinearity = TanhSigmoid(self.convolution.outsize)

    def fprop(self, inputs):
        """Forward propagate input through this module."""
        return self.nonlinearity.fprop(self.convolution.fprop(inputs))

    def bprop(self, dout, inputs):
        """
        Backpropagate derivatives through this module to get derivatives
        with respect to this module's input.
        """
        squash_inputs = self.convolution.fprop(inputs)
        squash_derivs = self.nonlinearity.bprop(dout, squash_inputs)
        return self.convolution.bprop(squash_derivs, inputs)

    def grad(self, dout, inputs):
        """
        Gradient of the error with respect to the parameters of this module.
        
        Parameters:
            * dout -- derivative of the outputs of this module
                (will be size of input - size of filter + 1, elementwise)
            * inputs -- inputs to this module
        """
        squash_inputs = self.convolution.fprop(inputs)
        squash_derivs = self.nonlinearity.bprop(dout, squash_inputs)
        return self.convolution.grad(squash_derivs, inputs)

    def initialize(self):
        """Initialize the module's weights."""
        self.convolution.initialize()

    @property
    def outsize(self):
        """Output size."""
        return self.convolution.outsize

    @property
    def imsize(self):
        """Image input size."""
        return self.convolution.imsize

    @property
    def fsize(self):
        """Filter shape."""
        return self.convolution.filter.shape
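A short usage sketch for this class, in the same style as the tests above (assuming the same random helper, i.e. NumPy's random module):

fmap = NaiveConvolutionalFeatureMap((5, 5), (20, 20))
fmap.initialize()

inputs = random.normal(size=(20, 20))
out = fmap.fprop(inputs)                   # output has shape fmap.outsize, (16, 16) here
dout = random.normal(size=fmap.outsize)    # pretend derivatives from a layer above
dinputs = fmap.bprop(dout, inputs)         # derivatives w.r.t. the (20, 20) input
dparams = fmap.grad(dout, inputs)          # derivatives w.r.t. the convolution parameters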