Code Example #1
File: tests.py Project: fitrialif/convolupy
def test_multi_convolutional_feature_map_fprop():
    # Build the same architecture twice: once from separate modules...
    cplane1 = ConvolutionalPlane((5, 5), (20, 20), bias=False)
    cplane2 = ConvolutionalPlane((5, 5), (20, 20), bias=False)
    sigmoid = TanhSigmoid((16, 16), bias=True)
    # ...and once as a single fused MultiConvolutionalFeatureMap.
    mfmap = MultiConvolutionalFeatureMap((5, 5), (20, 20), 2)
    mfmap.initialize()
    # Tie the standalone modules' parameters to the feature map's.
    cplane1.params[:] = mfmap.planes[0].params
    cplane2.params[:] = mfmap.planes[1].params
    sigmoid.params[:] = mfmap.params[0:1]
    inputs1 = random.normal(size=(20, 20))
    inputs2 = random.normal(size=(20, 20))
    # The fused forward pass should match the manual composition:
    # sum the two convolution outputs, then apply the squashing nonlinearity.
    control = sigmoid.fprop(cplane1.fprop(inputs1) + cplane2.fprop(inputs2))
    mfmap_out = mfmap.fprop([inputs1, inputs2])
    assert_array_almost_equal(control, mfmap_out)
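The test above omits its import preamble, as these listings do. A minimal preamble that would make it runnable is sketched below; the convolupy module paths are assumptions inferred from the file names shown on this page, while the numpy imports are standard. Note that the (16, 16) sigmoid size follows from the (20, 20) image and (5, 5) filter: 20 - 5 + 1 = 16 in each dimension.

from numpy import random
from numpy.testing import assert_array_almost_equal

# Assumed module layout -- adjust to wherever these classes live in convolupy.
from convolupy.planes import ConvolutionalPlane
from convolupy.sigmoids import TanhSigmoid
from convolupy.maps import MultiConvolutionalFeatureMap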
Code Example #2
File: tests.py Project: pombredanne/convolupy
def test_sigmoid_initialize_raises_if_no_parameters(self):
    # Constructed without bias=True, the TanhSigmoid owns no parameters,
    # so initialize() has nothing to fill in and is expected to raise.
    foo = TanhSigmoid((5, 5))
    foo.initialize()
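As excerpted, the test body never asserts that an exception is raised; that check is presumably expressed by a decorator or test-class machinery stripped from this listing. A standalone sketch of the same idea, using numpy.testing.assert_raises purely as an assumption about how one might write it:

from numpy.testing import assert_raises

from convolupy.sigmoids import TanhSigmoid  # assumed module path


def test_sigmoid_initialize_raises_if_no_parameters():
    # No bias means no parameters, so initialize() should raise.
    foo = TanhSigmoid((5, 5))
    assert_raises(Exception, foo.initialize)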
Code Example #3
File: maps.py Project: alexgarry/convolupy
def __init__(self, fsize, imsize):
    """Construct a feature map with given filter size and image size."""
    super(NaiveConvolutionalFeatureMap, self).__init__()
    self.convolution = ConvolutionalPlane(fsize, imsize)
    self.nonlinearity = TanhSigmoid(self.convolution.outsize)
Code Example #4
File: maps.py Project: alexgarry/convolupy
class NaiveConvolutionalFeatureMap(BaseBPropComponent):
    """
    One way to implement a standard feature map that takes input from a 
    single lower-level image. This serves two purposes: to demonstrate 
    how to write new learning modules by composing two existing modules,
    and to serve as a sanity check for the more efficient implementation,
    ConvolutionalFeatureMap.
    
    Has, as members, a ConvolutionalPlane with standard bias configuration
    and a TanhSigmoid object that does the squashing.
    
    This is a little wasteful since each of the modules has separate output
    array members. See FeatureMap for a slightly more memory efficient 
    implementation that uses subclassing.
    """
    def __init__(self, fsize, imsize):
        """Construct a feature map with given filter size and image size."""
        super(NaiveConvolutionalFeatureMap, self).__init__()
        self.convolution = ConvolutionalPlane(fsize, imsize)
        self.nonlinearity = TanhSigmoid(self.convolution.outsize)

    def fprop(self, inputs):
        """Forward propagate input through this module."""
        return self.nonlinearity.fprop(self.convolution.fprop(inputs))

    def bprop(self, dout, inputs):
        """
        Backpropagate derivatives through this module to get derivatives
        with respect to this module's input.
        """
        squash_inputs = self.convolution.fprop(inputs)
        squash_derivs = self.nonlinearity.bprop(dout, squash_inputs)
        return self.convolution.bprop(squash_derivs, inputs)

    def grad(self, dout, inputs):
        """
        Gradient of the error with respect to the parameters of this module.
        
        Parameters:
            * dout -- derivative of the outputs of this module
                (will be size of input - size of filter + 1, elementwise)
            * inputs -- inputs to this module
        """
        squash_inputs = self.convolution.fprop(inputs)
        squash_derivs = self.nonlinearity.bprop(dout, squash_inputs)
        return self.convolution.grad(squash_derivs, inputs)

    def initialize(self):
        """Initialize the module's weights."""
        self.convolution.initialize()

    @property
    def outsize(self):
        """Output size."""
        return self.convolution.outsize

    @property
    def imsize(self):
        """Image input size."""
        return self.convolution.imsize

    @property
    def fsize(self):
        """Filter shape."""
        return self.convolution.filter.shape
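The docstring above describes the class as a composition of a ConvolutionalPlane and a TanhSigmoid. Below is a minimal usage sketch of that composition, assuming a convolupy.maps import path based on the maps.py attribution on this page and feeding random data; shapes in the comments follow the "input size - filter size + 1" rule stated in the grad docstring.

import numpy as np

from convolupy.maps import NaiveConvolutionalFeatureMap  # assumed import path

fmap = NaiveConvolutionalFeatureMap((5, 5), (20, 20))
fmap.initialize()

image = np.random.normal(size=(20, 20))
out = fmap.fprop(image)            # valid convolution then tanh; outsize is (16, 16)

# Backpropagate a dummy output derivative to get input- and parameter-gradients.
dout = np.ones(fmap.outsize)
dinput = fmap.bprop(dout, image)   # derivatives with respect to the input image
dparams = fmap.grad(dout, image)   # derivatives with respect to the ConvolutionalPlane's parameters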