Example 1
    def __init__(self, rngs, input_layer, Lshape, traits, activation):
        super(ConvLayer, self).__init__(input_layer, traits, "Conv")

        self.rng = rngs[0]
        self.l2decay = traits['l2decay']
        filter_shape = Lshape[1]
        # The number of input channels must match number of filter channels
        assert Lshape[0][1] == filter_shape[1]
        self.pad = traits['padding']

        self.W = NNl.gen_weights(self.rng, filter_shape, 0, traits['initW'])

        # convolve input feature maps with filters
        # Using Alex K.'s fast CUDA conv, courtesy of S. Dieleman
        self.x = self.input_layer.output(False)
        conv_op = FilterActs(pad=self.pad, partial_sum=1)
        input_shuffled = (self.x).dimshuffle(1, 2, 3, 0) # bc01 to c01b
        filters_shuffled = (self.W).dimshuffle(1, 2, 3, 0) # bc01 to c01b
        contiguous_input = gpu_contiguous(input_shuffled)
        contiguous_filters = gpu_contiguous(filters_shuffled)
        out_shuffled = conv_op(contiguous_input, contiguous_filters)
        self.conv_out = out_shuffled.dimshuffle(3, 0, 1, 2) # c01b to bc01

        # store parameters of this layer
        self.params = [self.W]
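
The dimshuffle calls above convert between Theano's default bc01 layout (batch, channel, row, column) and the c01b layout that Alex Krizhevsky's cuda-convnet kernels expect (FilterActs and gpu_contiguous conventionally come from pylearn2.sandbox.cuda_convnet.filter_acts and theano.sandbox.cuda.basic_ops, respectively). A minimal, self-contained NumPy sketch of the same permutations, with illustrative shapes not taken from the original code:

import numpy as np

# bc01: (batch, channels, rows, cols); sizes are illustrative
x_bc01 = np.zeros((128, 3, 32, 32), dtype=np.float32)

# bc01 -> c01b, the same permutation as x.dimshuffle(1, 2, 3, 0)
x_c01b = x_bc01.transpose(1, 2, 3, 0)
assert x_c01b.shape == (3, 32, 32, 128)

# c01b -> bc01, the same permutation as out_shuffled.dimshuffle(3, 0, 1, 2)
x_back = x_c01b.transpose(3, 0, 1, 2)
assert x_back.shape == x_bc01.shape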
Example 2
    def __init__(self, rngs, input_layer, Lshape, traits, activation):
        super(OutputLayer, self).__init__(input_layer, traits, "Output")
        self.out_shape = (Lshape[0], Lshape[2])  # (batch size, number of output units)
        self.W_shape = Lshape[1:]
        self.activation = activation
        self.l2decay = traits['l2decay']

        if len(Lshape) != 3:
            print("Logistic regression shape must be (2,), it is,", Lshape)

        # Initialize weights and biases (can load values later)
        self.W = NNl.gen_weights(rngs[0], self.W_shape, 0, traits['initW'])
        self.b = Tsh(np.zeros((Lshape[2],), dtype=Tfloat))
        self.params = [self.W, self.b]
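
NNl.gen_weights is not defined in these snippets; judging from the call sites, it takes (rng, shape, mean, scale) and returns a Theano shared variable. A hypothetical stand-in under that assumption (the real NNl.gen_weights may well use a different distribution):

import numpy as np
import theano

def gen_weights_sketch(rng, shape, mean, scale):
    # Hypothetical stand-in for NNl.gen_weights: Gaussian init around
    # `mean` with std `scale`, wrapped in a Theano shared variable.
    values = np.asarray(rng.normal(mean, scale, size=shape),
                        dtype=theano.config.floatX)
    return theano.shared(value=values, borrow=True)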
Example 3
    def __init__(self, rngs, input_layer, Lshape, traits, activation):
        super(FCLayer, self).__init__(input_layer, traits, "FC")

        self.p_retain = (1. - traits['dropout'])  # probability of keeping a unit under dropout
        self.rng = rngs[0]
        self.srng = rngs[1]
        self.out_shape = (Lshape[0], Lshape[2])
        self.W_shape = Lshape[1:]
        self.activation = activation
        self.l2decay = traits['l2decay']
        self.d_rec = input_layer.output(False)
        self.best_error = np.inf

        if len(Lshape) != 3:
            print "FC layer shape must be (2,), it is,", Lshape

        self.W = NNl.gen_weights(self.rng, self.W_shape, 0, traits['initW'])
        self.b = Tsh(np.zeros(Lshape[2], dtype=Tfloat))
        self.ib = Tsh(np.zeros(Lshape[1], dtype=Tfloat))  # input-side bias, used only during pretraining
        self.params = [self.W, self.b]
        self.pt_params = [self.W, self.b, self.ib]
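
Here p_retain = 1 - traits['dropout'] is the probability of keeping a unit when dropout is applied (the srng stored from rngs[1] is presumably a Theano RandomStreams used to sample the masks). A NumPy sketch of the masking step, as an illustration rather than the layer's actual Theano graph:

import numpy as np

def dropout_sketch(rng, x, p_retain):
    # Keep each unit with probability p_retain, zero it otherwise.
    mask = rng.binomial(n=1, p=p_retain, size=x.shape).astype(x.dtype)
    return x * mask

rng = np.random.RandomState(0)
h = np.ones((4, 8), dtype=np.float32)
h_dropped = dropout_sketch(rng, h, p_retain=0.5)  # roughly half the units survive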