Example #1
    def __init__(self, input_layer, n_filters, filter_size, weights_std, init_bias_value, stride=1, nonlinearity=layers.rectify, dropout=0., partial_sum=None, untie_biases=False):
        """
        This is a convolution which is circular in the 0-direction, and valid in the 1-direction.

        n_filters should be a multiple of 16
        """
        self.input_layer = input_layer
        self.n_filters = n_filters
        self.filter_size = filter_size
        self.weights_std = np.float32(weights_std)
        self.init_bias_value = np.float32(init_bias_value)
        self.stride = stride
        self.nonlinearity = nonlinearity
        self.dropout = dropout
        self.partial_sum = partial_sum
        self.untie_biases = untie_biases
        # if untie_biases == True, each position in the output map has its own bias (as opposed to having the same bias everywhere for a given filter)
        self.mb_size = self.input_layer.mb_size

        self.input_shape = self.input_layer.get_output_shape()

        self.filter_shape = (self.input_shape[0], filter_size, filter_size, n_filters)

        self.W = layers.shared_single(4) # theano.shared(np.random.randn(*self.filter_shape).astype(np.float32) * self.weights_std)

        if self.untie_biases:
            self.b = layers.shared_single(3)
        else:
            self.b = layers.shared_single(1) # theano.shared(np.ones(n_filters).astype(np.float32) * self.init_bias_value)

        self.params = [self.W, self.b]
        self.bias_params = [self.b]
        self.reset_params()

        self.filter_acts_op = FilterActs(stride=self.stride, partial_sum=self.partial_sum)
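
Only the constructor is shown above, so the "circular in the 0-direction, valid in the 1-direction" border mode is easiest to see on a toy single-channel example. Below is a minimal NumPy/SciPy sketch of that behaviour; the function name and shapes are invented for illustration, and the real layer does this on the GPU via FilterActs rather than scipy.

    import numpy as np
    from scipy.signal import correlate2d

    def cyclic_valid_correlate(img, filt):
        # circular along axis 0: wrap-pad the rows by filter_size - 1,
        # then a plain 'valid' correlation handles both directions at once
        pad = filt.shape[0] - 1
        wrapped = np.pad(img, ((0, pad), (0, 0)), mode="wrap")
        return correlate2d(wrapped, filt, mode="valid")

    img = np.arange(36, dtype=np.float32).reshape(6, 6)
    filt = np.ones((3, 3), dtype=np.float32)
    out = cyclic_valid_correlate(img, filt)
    print(out.shape)  # (6, 4): full size along the circular axis, shrunk along the valid one
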
Example #2
    def __init__(self, input_layer, mirror_layer, nonlinearity=None):
        """
        Only the valid border mode is supported.

        n_filters should be a multiple of 16
        """

        self.mirror_layer = mirror_layer

        self.input_layer = input_layer
        self.input_shape = self.input_layer.get_output_shape()
        n_filters = self.input_shape[0]

        if nonlinearity:
            self.nonlinearity = nonlinearity
        else:
            self.nonlinearity = mirror_layer.nonlinearity

        self.n_channels = mirror_layer.n_channels
        self.n_filters = mirror_layer.n_filters
        self.filter_size = mirror_layer.filter_size
        self.weights_std = mirror_layer.weights_std
        self.init_bias_value = mirror_layer.init_bias_value
        self.stride = mirror_layer.stride
        self.dropout = mirror_layer.dropout
        self.partial_sum = mirror_layer.partial_sum
        self.pad = mirror_layer.pad
        self.untie_biases = mirror_layer.untie_biases

        self.mb_size = self.input_layer.mb_size

        self.filter_shape = mirror_layer.filter_shape

        self.trainable = False
        self.W = layers.shared_single(4)

        if self.untie_biases:
            self.b = layers.shared_single(3)
        else:
            self.b = layers.shared_single(1)

        self.params = [self.W, self.b]
        self.bias_params = [self.b]

        self.data_order = layers.data_order.type2

        assert (len(self.input_layer.get_output_shape()) == 4), \
            'Input must have 4 dimensions.'

        assert (self.input_layer.data_order == self.data_order), \
            'Input data order does not match this layer\'s data order.'

        self.reset_params()

        self.image_acts_op = ImageActs(stride=self.stride,
                                       partial_sum=self.partial_sum,
                                       pad=self.pad)
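
This mirror layer copies every hyperparameter (filter_size, stride, pad, ...) from the layer it mirrors and applies ImageActs instead of FilterActs, i.e. it runs the convolution "backwards" from feature maps toward image-shaped output. A short sketch of the shape arithmetic this relies on, assuming the usual valid-convolution formula; the helper names below are invented for illustration and are not part of the layer above.

    def conv_output_size(input_size, filter_size, stride=1, pad=0):
        # forward (FilterActs-style) size for one spatial dimension
        return (input_size + 2 * pad - filter_size) // stride + 1

    def mirror_output_size(output_size, filter_size, stride=1, pad=0):
        # size produced by the mirrored (ImageActs-style) pass
        return (output_size - 1) * stride + filter_size - 2 * pad

    # when (input + 2*pad - filter_size) is divisible by the stride,
    # the mirrored pass recovers the original spatial size exactly
    for filter_size, stride, pad in [(5, 1, 0), (5, 1, 2), (4, 2, 0)]:
        out = conv_output_size(96, filter_size, stride, pad)
        assert mirror_output_size(out, filter_size, stride, pad) == 96
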
Example #3
    def __init__(self,
                 input_layer,
                 n_filters,
                 filter_size,
                 weights_std,
                 stride=1,
                 nonlinearity=layers.rectify,
                 dropout=0.,
                 partial_sum=None,
                 pad=0,
                 trainable=True):
        """
        Only the valid border mode is supported.

        n_filters should be a multiple of 16
        """
        self.input_layer = input_layer
        self.input_shape = self.input_layer.get_output_shape()
        self.n_filters = n_filters
        n_channels = self.input_shape[0]
        self.n_channels = n_channels
        self.filter_size = filter_size
        self.weights_std = numpy.float32(weights_std)
        self.stride = stride
        self.nonlinearity = nonlinearity
        self.dropout = dropout
        self.partial_sum = partial_sum
        self.pad = pad
        self.mb_size = self.input_layer.mb_size

        self.data_order = layers.data_order.type2

        assert (len(self.input_layer.get_output_shape()) == 4), \
            'Input must have 4 dimensions.'

        assert (self.input_layer.data_order == self.data_order), \
            'Input data order does not match this layer\'s data order.'

        self.filter_shape = (n_channels, filter_size, filter_size, n_filters)

        self.trainable = trainable
        self.W = layers.shared_single(4)

        self.params = [self.W]
        self.reset_params()

        self.filter_acts_op = FilterActs(stride=self.stride,
                                         partial_sum=self.partial_sum,
                                         pad=self.pad)
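
The data_order assertion together with the (n_channels, filter_size, filter_size, n_filters) filter shape suggests the cuda-convnet c01b layout, i.e. (channel, row, column, batch) rather than the more common (batch, channel, row, column). A small NumPy sketch of that layout, with illustrative array names and sizes; treat the type2 = c01b reading as an inference from this snippet, not something the code states outright.

    import numpy as np

    batch, channels, rows, cols = 128, 3, 96, 96

    # bc01: (batch, channel, row, column), the layout most Theano conv code uses
    x_bc01 = np.random.randn(batch, channels, rows, cols).astype(np.float32)

    # c01b: (channel, row, column, batch); note x_c01b.shape[0] is the channel
    # count, matching the input_shape[0] used as n_channels above
    x_c01b = x_bc01.transpose(1, 2, 3, 0)
    assert x_c01b.shape == (channels, rows, cols, batch)

    # filters follow the same convention: (input channels, rows, columns, filters)
    filter_size, n_filters = 5, 32
    W = np.random.randn(channels, filter_size, filter_size, n_filters).astype(np.float32)
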
Example #4
    def __init__(self,
                 input_layer,
                 n_filters,
                 filter_size,
                 weights_std,
                 init_bias_value,
                 stride=1,
                 nonlinearity=layers.rectify,
                 dropout=0.,
                 partial_sum=None,
                 pad=0,
                 untie_biases=False,
                 trainable=True):
        """
        Only the valid border mode is supported.

        n_filters should be a multiple of 16
        """
        self.input_layer = input_layer
        self.input_shape = self.input_layer.get_output_shape()
        self.n_filters = n_filters
        n_channels = self.input_shape[0]
        self.n_channels = n_channels
        self.filter_size = filter_size
        self.weights_std = numpy.float32(weights_std)
        self.init_bias_value = numpy.float32(init_bias_value)
        self.stride = stride
        self.nonlinearity = nonlinearity
        self.dropout = dropout
        self.partial_sum = partial_sum
        self.pad = pad
        self.untie_biases = untie_biases
        # if untie_biases == True, each position in the output map has its own
        # bias (as opposed to having the same bias everywhere for a given
        # filter)
        self.mb_size = self.input_layer.mb_size

        self.filter_shape = (n_channels, filter_size, filter_size, n_filters)

        self.trainable = trainable
        self.W = layers.shared_single(4)

        if self.untie_biases:
            self.b = layers.shared_single(3)
        else:
            self.b = layers.shared_single(1)

        self.params = [self.W, self.b]
        self.bias_params = [self.b]

        self.data_order = layers.data_order.type2

        assert (len(self.input_layer.get_output_shape()) == 4), \
            'Input must have 4 dimensions.'

        assert (self.input_layer.data_order == self.data_order), \
            'Input data order does not match this layer\'s data order.'

        self.reset_params()

        self.filter_acts_op = FilterActs(stride=self.stride,
                                         partial_sum=self.partial_sum,
                                         pad=self.pad)
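
reset_params() is not shown here, but the commented-out expressions in Example #1 indicate how the shared variables are filled: zero-mean Gaussian weights scaled by weights_std and a constant bias of init_bias_value. Below is a NumPy sketch of those initial values, including the untie_biases case; the (n_filters, out_h, out_w) shape for the untied bias is an assumption read off from shared_single(3), and the concrete sizes are illustrative.

    import numpy as np

    n_channels, filter_size, n_filters = 3, 5, 32
    out_h = out_w = 92                      # illustrative output map size
    weights_std = np.float32(0.01)
    init_bias_value = np.float32(0.1)
    untie_biases = True

    # weights: Gaussian noise scaled by weights_std, in the filter_shape used above
    W = np.random.randn(n_channels, filter_size, filter_size, n_filters).astype(np.float32) * weights_std

    if untie_biases:
        # one bias per filter *and* per output position
        b = np.ones((n_filters, out_h, out_w), dtype=np.float32) * init_bias_value
    else:
        # one bias per filter, shared across all output positions
        b = np.ones(n_filters, dtype=np.float32) * init_bias_value
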