# Example 1
# File: sda.py  Project: magic2du/dlnn
    def __init__(self, numpy_rng, theano_rng=None, cfg = None, dnn = None):
        """Stacked denoising autoencoders for DNN pre-training.

        Each dA shares its parameters (W, b) with the matching hidden
        layer of the supplied DNN, so pre-training the autoencoders
        directly updates the DNN's weights.
        """
        self.cfg = cfg
        self.hidden_layers_sizes = cfg.hidden_layers_sizes
        self.n_ins = cfg.n_ins
        self.hidden_layers_number = len(self.hidden_layers_sizes)

        self.dA_layers = []
        self.sigmoid_layers = []
        self.params = []

        # Derive a Theano RNG from the numpy one when none is supplied.
        if not theano_rng:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))

        # The symbolic input variable is shared with the DNN being pre-trained.
        self.x = dnn.x

        for layer_idx in xrange(self.hidden_layers_number):
            first_layer = (layer_idx == 0)

            # The first layer reads the raw input; every deeper layer reads
            # the output of the hidden layer directly below it.
            if first_layer:
                input_size = self.n_ins
                layer_input = self.x
            else:
                input_size = self.hidden_layers_sizes[layer_idx - 1]
                layer_input = dnn.layers[layer_idx - 1].output

            # Only the first layer may use a distinct reconstruction
            # activation (e.g. for real-valued visible units).
            reconstruct_activation = (cfg.firstlayer_reconstruct_activation
                                      if first_layer
                                      else cfg.hidden_activation)

            # Track the shared hidden layer and its parameters, then build
            # the denoising autoencoder tied to it.
            hidden_layer = dnn.layers[layer_idx]
            self.sigmoid_layers.append(hidden_layer)
            self.params.extend(hidden_layer.params)

            self.dA_layers.append(dA(numpy_rng=numpy_rng,
                                     theano_rng=theano_rng,
                                     input=layer_input,
                                     n_visible=input_size,
                                     n_hidden=self.hidden_layers_sizes[layer_idx],
                                     W=hidden_layer.W,
                                     bhid=hidden_layer.b,
                                     sparsity = cfg.sparsity,
                                     sparsity_weight = cfg.sparsity_weight,
                                     hidden_activation = cfg.hidden_activation,
                                     reconstruct_activation = reconstruct_activation,
                                     contraction_level= self.cfg.contraction_level,
                                     n_batchsize = self.cfg.batch_size))
# Example 2
# File: sda.py  Project: magic2du/dlnn
    def __init__(self, numpy_rng, theano_rng=None, cfg = None):
        """Stacked denoising autoencoders over a joint (x, y) input.

        Unlike the plain SdA, the visible layer is the concatenation of
        the input features and the targets (n_ins + n_outs), and the dA
        layers are built without a symbolic input (input=None); the input
        is presumably wired up later by the caller — TODO confirm.
        """
        self.cfg = cfg
        self.hidden_layers_sizes = cfg.hidden_layers_sizes
        # Visible size covers both the features and the targets.
        self.n_ins = cfg.n_ins + cfg.n_outs
        self.hidden_layers_number = len(self.hidden_layers_sizes)

        self.dA_layers = []
        self.sigmoid_layers = []
        self.params = []

        # Derive a Theano RNG from the numpy one when none is supplied.
        if not theano_rng:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))

        # Symbolic variable for the concatenated (x, y) minibatch.
        self.xy = T.matrix('xy', dtype=theano.config.floatX)

        for i in xrange(self.hidden_layers_number):
            # The size of the input is either the number of hidden units of
            # the layer below, or the joint input size on the first layer.
            if i == 0:
                input_size = self.n_ins
            else:
                # When 'firstlayer_xy' is set, only the first layer sees the
                # targets; otherwise y is re-appended at every hidden layer.
                # BUGFIX: dict.has_key() is deprecated (removed in Python 3);
                # the `in` operator is equivalent and works on Python 2 too.
                if 'firstlayer_xy' in self.cfg.settings and self.cfg.settings['firstlayer_xy'] == 1:
                    input_size = self.hidden_layers_sizes[i - 1]
                else:
                    input_size = self.hidden_layers_sizes[i - 1] + cfg.n_outs

            # Only the first layer may use a distinct reconstruction
            # activation (e.g. for real-valued visible units).
            if i == 0:
                reconstruct_activation = cfg.firstlayer_reconstruct_activation
            else:
                reconstruct_activation = cfg.hidden_activation

            dA_layer = dA(numpy_rng=numpy_rng,
                          theano_rng=theano_rng,
                          input=None,
                          n_visible=input_size,
                          n_hidden=self.hidden_layers_sizes[i],
                          sparsity = cfg.sparsity,
                          sparsity_weight = cfg.sparsity_weight,
                          hidden_activation = cfg.hidden_activation,
                          reconstruct_activation = reconstruct_activation,
                          contraction_level= self.cfg.contraction_level,
                          n_batchsize = self.cfg.batch_size)
            self.dA_layers.append(dA_layer)