Example #1
    def __init__(self, input_size, output_size, active_func=None,
                 W=None, b=None,
                 through=False):
        """Initialize the fully connected layer

        Args:
            input_size: Number of input units
            output_size: Number of output units
            active_func: Activation function of this layer
            W: Optional pre-built shared weights, (input_size, output_size)
            b: Optional pre-built shared biases, (output_size,)
            through: True if the output should be passed through
        """

        super(FullConnLayer, self).__init__()

        self._active_func = active_func
        self._through = through

        if W is None:
            # Glorot-style initialization: sample W uniformly from
            # [-sqrt(6 / (fan_in + fan_out)), sqrt(6 / (fan_in + fan_out))].
            W_bound = numpy.sqrt(6.0 / (input_size + output_size))

            # Sigmoid units saturate more easily, so the range is widened by 4.
            if active_func == actfuncs.sigmoid:
                W_bound *= 4

            init_W = numpy.asarray(numpy_rng.uniform(
                low=-W_bound, high=W_bound,
                size=(input_size, output_size)), dtype=theano.config.floatX)

            self._W = theano.shared(value=init_W, borrow=True)
        else:
            self._W = W

        if b is None:
            init_b = numpy.zeros(output_size, dtype=theano.config.floatX)

            self._b = theano.shared(value=init_b, borrow=True)
        else:
            self._b = b

        self._params = [self._W, self._b]
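
A minimal usage sketch of how this initialization plays out, assuming that numpy_rng is a module-level numpy.random.RandomState and that actfuncs.sigmoid corresponds to Theano's sigmoid; the 784 -> 500 layer sizes and the forward pass at the end are illustrative assumptions, not part of the snippet:

import numpy
import theano
import theano.tensor as T

# Assumed module-level RNG, as referenced (but not defined) in the snippet.
numpy_rng = numpy.random.RandomState(1234)

# The snippet's initialization written out for a sigmoid 784 -> 500 layer:
# Glorot bound sqrt(6 / (fan_in + fan_out)), multiplied by 4 for the sigmoid.
W_bound = 4 * numpy.sqrt(6.0 / (784 + 500))
init_W = numpy.asarray(
    numpy_rng.uniform(low=-W_bound, high=W_bound, size=(784, 500)),
    dtype=theano.config.floatX)
W = theano.shared(value=init_W, borrow=True)
b = theano.shared(numpy.zeros(500, dtype=theano.config.floatX), borrow=True)

# The forward pass such a layer typically computes (not shown in the snippet):
x = T.matrix('x')
y = T.nnet.sigmoid(T.dot(x, W) + b)
f = theano.function([x], y)
print(f(numpy.zeros((2, 784), dtype=theano.config.floatX)).shape)  # (2, 500)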
Example #2
    def __init__(self, filter_shape, pool_shape,
                 image_shape=None, active_func=None,
                 flatten=False, through=False):
        """Initialize the convolutional and max-pooling layer

        Args:
            filter_shape: 4D-tensor, (n_filters, n_channels, n_rows, n_cols)
            pool_shape: 2D-tensor, (n_rows, n_cols)
            image_shape: None if the input is always 4D-tensor, (n_images,
                n_channels, n_rows, n_cols). If the input images are represented
                as vectors, then a 3D-tensor, (n_channels, n_rows, n_cols) is
                required.
            active_func: Active function of this layer
            flatten: True if the output image should be flattened as a vector
            through: True if the output should be passed through
        """

        super(ConvPoolLayer, self).__init__()

        self._filter_shape = filter_shape
        self._pool_shape = pool_shape
        self._image_shape = image_shape
        self._active_func = active_func
        self._flatten = flatten
        self._through = through

        # Each hidden unit receives n_channels * n_rows * n_cols inputs.
        fan_in = numpy.prod(filter_shape[1:])
        # Each lower-layer unit feeds n_filters * n_rows * n_cols outputs,
        # reduced by the pooling area.
        fan_out = (filter_shape[0] * numpy.prod(filter_shape[2:]) /
                   numpy.prod(pool_shape))

        # Glorot-style uniform initialization bound.
        W_bound = numpy.sqrt(6.0 / (fan_in + fan_out))

        # Sigmoid units saturate more easily, so the range is widened by 4.
        if active_func == actfuncs.sigmoid:
            W_bound *= 4

        init_W = numpy.asarray(numpy_rng.uniform(
            low=-W_bound, high=W_bound,
            size=filter_shape), dtype=theano.config.floatX)

        self._W = theano.shared(value=init_W, borrow=True)

        init_b = numpy.zeros((filter_shape[0],), dtype=theano.config.floatX)

        self._b = theano.shared(value=init_b, borrow=True)

        self._params = [self._W, self._b]
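
For context, here is a sketch of the convolution plus max-pooling forward pass that a layer like this typically wraps around the W and b initialized above. The conv2d/pool_2d calls and the concrete shapes (20 filters of 5x5 over 1 channel, 2x2 pooling) are assumptions for illustration, not part of the snippet, and the pool_2d import path is for recent Theano versions (theano.tensor.signal.pool):

import numpy
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d
from theano.tensor.signal.pool import pool_2d

numpy_rng = numpy.random.RandomState(1234)

# Hypothetical shapes: 20 filters of size 5x5 over 1 input channel, 2x2 pooling.
filter_shape = (20, 1, 5, 5)
pool_shape = (2, 2)

fan_in = numpy.prod(filter_shape[1:])
fan_out = filter_shape[0] * numpy.prod(filter_shape[2:]) // numpy.prod(pool_shape)
W_bound = numpy.sqrt(6.0 / (fan_in + fan_out))

W = theano.shared(numpy.asarray(
    numpy_rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),
    dtype=theano.config.floatX), borrow=True)
b = theano.shared(numpy.zeros((filter_shape[0],), dtype=theano.config.floatX),
                  borrow=True)

# x: (n_images, n_channels, n_rows, n_cols)
x = T.tensor4('x')
conv_out = conv2d(x, W, filter_shape=filter_shape)
pooled = pool_2d(conv_out, pool_shape, ignore_border=True)
y = T.nnet.sigmoid(pooled + b.dimshuffle('x', 0, 'x', 'x'))
f = theano.function([x], y)
print(f(numpy.zeros((2, 1, 28, 28), dtype=theano.config.floatX)).shape)  # (2, 20, 12, 12)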