Example #1
import numpy as np
import theano
import theano.tensor as T

# `Weight` (a wrapper around a Gaussian-initialised shared variable) and
# `deconv` (a strided transposed convolution) are helpers from the
# surrounding project; they are not part of Theano itself.


class DeConvLayer(object):
    """Transposed ("de-")convolution layer with optional batch normalisation."""

    def __init__(self, in_channels, out_channels, kernel_len, activation,
                 batch_norm=False, unflatten_input=None):
        # Filter shape follows the (in_channels, out_channels, k, k)
        # convention expected by `deconv`.
        self.filter_shape = np.asarray((in_channels, out_channels, kernel_len, kernel_len))
        self.activation = activation
        self.unflatten_input = unflatten_input
        self.batch_norm = batch_norm

        std = 0.02
        self.W = Weight(self.filter_shape, mean=0.0, std=std).val
        self.b = Weight(self.filter_shape[1], mean=0.0, std=0).val

        if batch_norm:
            # Learned per-channel shift and scale, broadcast over the batch
            # and spatial axes.
            self.bn_mean = theano.shared(np.zeros(shape=(1, out_channels, 1, 1)).astype('float32'))
            self.bn_std = theano.shared(
                np.random.normal(1.0, 0.000001, size=(1, out_channels, 1, 1)).astype('float32'))

    def output(self, input):
        # Optionally reshape a flat input (e.g. the output of a dense layer)
        # into a 4D (batch, channels, height, width) tensor.
        if self.unflatten_input is not None:
            input = T.reshape(input, self.unflatten_input)

        conv_out = deconv(input, self.W, subsample=(2, 2), border_mode=(2, 2))
        conv_out = conv_out + self.b.dimshuffle('x', 0, 'x', 'x')

        if self.batch_norm:
            # Normalise over the batch and spatial axes (the constant 1.0 keeps
            # the denominator away from zero), then apply the learned
            # per-channel scale and shift.
            conv_out = ((conv_out - conv_out.mean(axis=(0, 2, 3), keepdims=True))
                        / (1.0 + conv_out.std(axis=(0, 2, 3), keepdims=True)))
            conv_out = (conv_out * T.addbroadcast(self.bn_std, 0, 2, 3)
                        + T.addbroadcast(self.bn_mean, 0, 2, 3))

        if self.activation == "relu":
            out = T.maximum(0.0, conv_out)
        elif self.activation == "tanh":
            out = T.tanh(conv_out)
        elif self.activation is None:
            out = conv_out
        else:
            raise ValueError("Unknown activation: %r" % self.activation)

        # The parameter dictionary is only populated here, so getParams()
        # must not be called before output() has been used at least once.
        self.params = {'W': self.W, 'b': self.b}
        if self.batch_norm:
            self.params["mu"] = self.bn_mean
            self.params["sigma"] = self.bn_std

        return out

    def getParams(self):
        return self.params
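
Neither `Weight` nor `deconv` is defined in the example. Judging from the calls `Weight(self.filter_shape, mean=0.0, std=std).val` and `Weight(self.filter_shape[1], mean=0.0, std=0).val`, `Weight` presumably wraps a Gaussian-initialised Theano shared variable. The following is a minimal sketch under that assumption; the class name comes from the example, but the body is hypothetical.

import numpy as np
import theano


class Weight(object):
    """Hypothetical helper: a Gaussian-initialised Theano shared variable."""

    def __init__(self, shape, mean=0.0, std=0.01):
        # Accept either a full filter shape or a single integer (bias length).
        if np.isscalar(shape):
            shape = (int(shape),)
        else:
            shape = tuple(int(s) for s in np.asarray(shape))
        # std=0 gives a constant initialisation equal to `mean`, as used for
        # the bias in the layer above.
        values = np.random.normal(mean, std, size=shape).astype('float32')
        self.val = theano.shared(values)

Exposing the shared variable via `.val` matches how the layer stores `self.W` and `self.b`, so the sketch can be dropped in for experimentation, but the actual project helper may differ (e.g. it may name the variable or support other initialisations).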