Example #1
File: SdA.py Project: nnkoushik/tetris
    def pretraining_functions(self, train_set_x, batch_size):

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.Param(corruption_level, default=0.2),
                    theano.Param(learning_rate, default=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
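
A minimal driving loop for the functions returned above, in the style of the Theano SdA tutorial; `sda`, `train_set_x`, and the epoch/batch counts are assumed names for illustration, not part of the example:

import numpy

# assumes `sda` is an already-built SdA instance and `train_set_x` is a
# theano shared variable holding the training matrix
pretraining_fns = sda.pretraining_functions(train_set_x=train_set_x,
                                            batch_size=20)
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // 20

for layer_idx in range(len(sda.dA_layers)):   # pretrain one dA at a time
    for epoch in range(15):                   # hypothetical epoch count
        costs = [pretraining_fns[layer_idx](batch_index,
                                            corruption=0.2, lr=0.1)
                 for batch_index in range(n_train_batches)]
        print('layer %d, epoch %d, mean cost %f'
              % (layer_idx, epoch, numpy.mean(costs)))
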
Example #2
File: sda.py Project: missmagnum/sdai
    def pretraining_functions(self, train_set_x, batch_size):

       
        index = T.lscalar('index') 
        corruption_level = T.scalar('corruption')  
        learning_rate = T.scalar('lr')  
        batch_begin = index * batch_size
        batch_end = batch_begin + batch_size
        
        pretrain_fns = []
        for dA in self.dA_layers:
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level, value=0.1),
                    theano.In(learning_rate, value=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            
            pretrain_fns.append(fn)

        return pretrain_fns
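
The only substantive difference from Example #1 is the input wrapper: `theano.Param(var, default=...)` is the older spelling, which later Theano releases renamed to `theano.In(var, value=...)`. A minimal illustration of the modern form used here:

import theano
import theano.tensor as T

corruption_level = T.scalar('corruption')
# modern equivalent of theano.Param(corruption_level, default=0.2)
inp = theano.In(corruption_level, value=0.2)
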
Example #3
    def pretraining_functions(self, pretrain_set_x, batch_size):

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level, value=0.2),
                    theano.In(learning_rate, value=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={self.x: pretrain_set_x[batch_begin:batch_end]})
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #4
    def pretraining_functions_data(self, train_set_x, batch_size):

        # index to a [mini]batch
        index = T.lscalar('index')
        corruption_level = T.scalar('corruption')  # % corruption rate of the denoising auto-encoder
        learning_rate = T.scalar('lr')  # learning rate
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers_data:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    corruption_level,
                    learning_rate
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #5
    def get_pretraining_cost_updates(self, corruption_level, learning_rate):
        cost_updates = []
        for dA in self.dA_layers:
            cost_update = dA.get_cost_updates(corruption_level, learning_rate)
            cost_updates.append(cost_update)

        return cost_updates
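
Unlike the other examples, this variant returns raw (cost, updates) pairs and leaves compilation to the caller. A sketch of how they could be compiled, assuming an `sda` instance that exposes its symbolic input as `sda.x` and a shared `train_set_x` as in the surrounding examples; passing Python floats here bakes the corruption level and learning rate in as constants:

import theano
import theano.tensor as T

index = T.lscalar('index')
batch_size = 20  # arbitrary choice for the sketch
batch_begin = index * batch_size
batch_end = batch_begin + batch_size

pretrain_fns = []
for cost, updates in sda.get_pretraining_cost_updates(0.2, 0.1):
    fn = theano.function(inputs=[index],
                         outputs=cost,
                         updates=updates,
                         givens={sda.x: train_set_x[batch_begin:batch_end]})
    pretrain_fns.append(fn)
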
Example #6
    def pretraining_functions(self, train_set_x, batch_size):

        index = T.lscalar('index')
        corruption_level = T.scalar('corruption')
        learning_rate = T.scalar('lr')

        n_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size

        batch_begin = index * batch_size

        batch_end = batch_begin + batch_size

        pretrain_fns = []

        for dA in self.dA_layers:
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)

            fn = theano.function(inputs=[index,
                                         theano.Param(corruption_level, default=0.2),
                                         theano.Param(learning_rate, default=0.1)],
                                 outputs=cost,
                                 updates=updates,
                                 givens={self.x: train_set_x[batch_begin:batch_end]})

            pretrain_fns.append(fn)

        return pretrain_fns
Example #7
    def pretraining_functions(self, train_set_x, batch_size):

        index = T.lscalar('index')
        corruption_level = T.scalar('corruption')
        learning_rate = T.scalar('lr')
        batch_begin = index * batch_size
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)

            fn = theano.function(
                inputs=[
                    index,
                    theano.Param(corruption_level, default=0.2),
                    theano.Param(learning_rate, default=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={self.x: train_set_x[batch_begin:batch_end]})
            pretrain_fns.append(fn)

        return pretrain_fns
Example #8
    def pretraining_functions(self, train_set_x, batch_size):
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # number of batches
        n_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            # in each layer
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(inputs=[index,
                                         theano.Param(corruption_level, default=0.2),
                                         theano.Param(learning_rate, default=0.1)],
                                 outputs=cost,
                                 updates=updates,
                                 givens={self.x: train_set_x[batch_begin:
                                                             batch_end]})
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #9
    def pretraining_functions(self, train_set_x, batch_size):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.

        :type train_set_x: theano.tensor.TensorType
        :param train_set_x: Shared variable that contains all datapoints used
                            for training the dA

        :type batch_size: int
        :param batch_size: size of a [mini]batch

        :type learning_rate: float
        :param learning_rate: learning rate used during training for any of
                              the dA layers
        '''

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # number of batches
        n_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(inputs=[index,
                                         theano.Param(corruption_level, default=0.2),
                                         theano.Param(learning_rate, default=0.1)],
                                 outputs=cost,
                                 updates=updates,
                                 givens={self.x: train_set_x[batch_begin:
                                                             batch_end]})
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #10
    def pretraining_functions(self, train_set_x, batch_size):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.

        :type train_set_x: theano.tensor.TensorType
        :param train_set_x: Shared variable that contains all datapoints used
                            for training the dA

        :type batch_size: int
        :param batch_size: size of a [mini]batch

        :type learning_rate: float
        :param learning_rate: learning rate used during training for any of
                              the dA layers
        '''

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            
            fn = theano.function(
                inputs=[
                    index,
                    theano.Param(corruption_level, default=0.2),
                    theano.Param(learning_rate, default=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #11
    def pretraining_functions(self, train_set_x, batch_size):
        # `index` is a vector of row indices, so `batch_size` is unused here;
        # the caller picks which rows make up each minibatch
        index = T.lvector('index')
        corruption = T.scalar('corruption')
        learning_rate = T.scalar('learning_rate')
        pretrain_fns = []
        for dA in self.dA_layers:
            cost, updates = dA.get_cost_updates(corruption, learning_rate)
            fn = theano.function(inputs=[index, corruption, learning_rate],
                                 outputs=[cost],
                                 updates=updates,
                                 givens={self.x: train_set_x[index]})
            pretrain_fns.append(fn)
        return pretrain_fns
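
Because `index` is an lvector used to fancy-index `train_set_x`, the caller chooses the exact rows of each minibatch. A sketch of one stochastic step with a random minibatch, assuming `sda` and `train_set_x` as in the other examples:

import numpy

pretrain_fns = sda.pretraining_functions(train_set_x, batch_size=20)
n_rows = train_set_x.get_value(borrow=True).shape[0]
rng = numpy.random.RandomState(123)

minibatch = rng.randint(0, n_rows, size=20).astype('int64')  # lvector dtype
cost = pretrain_fns[0](minibatch, 0.2, 0.1)[0]  # outputs=[cost] returns a list
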
Example #12
    def pretraining_functions_test(self, train_set_x, batch_size, corruption_level):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.

        :type train_set_x: theano.tensor.TensorType
        :param train_set_x: Shared variable that contains all datapoints used
                            for training the dA

        :type batch_size: int
        :param batch_size: size of a [mini]batch

        :type learning_rate: float
        :param learning_rate: learning rate used during training for any of
                              the dA layers
        '''

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use (shadows the argument above)
        learning_rate = T.scalar('lr')  # learning rate to use
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        pretrain_fns = []

        for dA in self.dA_layers:
            # get the cost only (no updates are applied in anomaly mode)
            cost = dA.get_cost_updates(corruption_level, learning_rate, anomaly=True)

            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level, value=0.2),
                ],
                outputs=cost,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)


        return pretrain_fns
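
Since this variant compiles no updates and returns only the reconstruction cost, it can score data without touching the weights, which is what the `anomaly=True` flag of this project's dA appears to be for. A sketch of scoring, with `sda` and a shared `test_set_x` as assumed names:

score_fns = sda.pretraining_functions_test(test_set_x, 20, 0.0)
n_batches = test_set_x.get_value(borrow=True).shape[0] // 20

# per-batch reconstruction cost of the first layer, usable as an anomaly score
scores = [score_fns[0](i, corruption=0.0) for i in range(n_batches)]
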
Example #13
    def pretraining_functions(self, train_set_x, batch_size, mu):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.

        :type train_set_x: theano.tensor.TensorType
        :param train_set_x: Shared variable that contains all datapoints used
                            for training the dA

        :type batch_size: int
        :param batch_size: size of a [mini]batch
        
        :type mu: float
        :param mu: extrapolation parameter used for implementing Nesterov-type acceleration

        '''

        # index to a [mini]batch
        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size
        
        pretrain_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate, mu)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level),
                    theano.In(learning_rate)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                },
                on_unused_input='ignore'
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns    
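
This variant threads the extrapolation weight `mu` into `get_cost_updates` at compile time, and because `theano.In` is used here without a `value=` default, both the corruption level and the learning rate must be supplied on every call. A sketch with assumed names:

# assumes `sda` and a shared `train_set_x`; mu=0.95 is an arbitrary choice
pretrain_fns = sda.pretraining_functions(train_set_x, batch_size=20, mu=0.95)

# no defaults were compiled in, so corruption and lr are mandatory here
cost = pretrain_fns[0](0, 0.1, 0.01)
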
Example #14
    def training_functions(self, train_set_x, batch_size):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.

        :type train_set_x: theano.tensor.TensorType
        :param train_set_x: Shared variable that contains all datapoints used
                            for training the dA 

        :type batch_size: int
        :param batch_size: size of a [mini]batch    
        '''

        index = T.lscalar('index')  # index to a minibatch
        corruption_level = T.scalar('corruption')  # % of corruption to use
        learning_rate = T.scalar('lr')  # learning rate to use
        # number of batches
        n_batches = int(
            train_set_x.get_value(borrow=True).shape[0] / batch_size)
        # beginning of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size

        train_fns = []
        for dA in self.dA_layers:
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level, value=0.2),
                    theano.In(learning_rate, value=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={self.x: train_set_x[batch_begin:batch_end]})
            # append `fn` to the list of functions
            train_fns.append(fn)

        return train_fns
Example #15
    def pretraining_functions(self, train_set_x, batch_size):

        index = T.lscalar('index')  
        corruption_level = T.scalar('corruption') 
        learning_rate = T.scalar('lr')  
        
        batch_begin = index * batch_size
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        decoding_fns  = []
        for dA in self.dA_layers:
            cost, updates, y, decoding_fn = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            fn = theano.function(
                inputs=[
                    index,
                    theano.Param(corruption_level, default=0.2),
                    theano.Param(learning_rate, default=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            pretrain_fns.append(fn)
            decoding_fns.append(decoding_fn)
            
        # decode back through the stack, from the deepest layer's decoder
        # down to the first layer's
        output = y
        for decoding_fn in reversed(decoding_fns):
            output = decoding_fn(output)
            
        output_fn = theano.function(
            [index,
             theano.Param(corruption_level, default=0.0)],
            output,
            givens={
                self.x: train_set_x[batch_begin: batch_end]
            }
        )

        return pretrain_fns, output_fn
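
The second return value reconstructs a minibatch by pushing the last layer's output back down through every decoder in the stack. A sketch of its use, with `sda` and a shared `train_set_x` as assumed names:

pretrain_fns, reconstruct_fn = sda.pretraining_functions(train_set_x,
                                                         batch_size=20)

# reconstruction of minibatch 0; corruption defaults to 0.0, so the
# input passes through the stack uncorrupted
batch_reconstruction = reconstruct_fn(0)
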
Example #16
File: SdA.py Project: jolinxql/SdA-My-Exp
    def pretraining_functions(self, train_set_x, batch_size):
        ''' Generates a list of functions, each of them implementing one
        step in training the dA corresponding to the layer with the same index.
        The function will require as input the minibatch index, and to train
        a dA you just need to iterate, calling the corresponding function on
        all minibatch indexes.
        '''
        
        # index to a [mini]batch
        index=T.lscalar('index')
        corruption_level=T.scalar('corruption')
        learning_rate=T.scalar('lr')
        
        # beginning of a batch, given index
        batch_begin=index*batch_size
        batch_end=batch_begin+batch_size
        
        pretrain_fns=[]
        for layer_i,dA in enumerate(self.dA_layers):
            print('building pretraining function for layer %d' % layer_i)
            # get the cost and the updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(
                inputs=[
                    index,
                    theano.In(corruption_level, value=0.2),
                    theano.In(learning_rate, value=0.1)
                ],
                outputs=cost,
                updates=updates,
                givens={
                    self.x: train_set_x[batch_begin: batch_end]
                }
            )
            # append `fn` to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns
Example #17
    def pretraining_function(self, train_set_x, batch_size):
        '''
        Generates a list of functions, one per layer, each performing one
        training step of that layer's dA; returns the list of pretraining
        functions. Each function takes a minibatch index as input and runs
        the same training step on every minibatch.
        train_set_x: theano.tensor.TensorType   datapoints for training the dA (shared variable)
        batch_size: int  size of a [mini]batch

        '''
        # index to a [mini]batch
        index = T.lscalar('index')
        corruption_level = T.scalar('corruption')  # corruption percentage
        learning_rate = T.scalar('lr')  # learning rate
        # number of batches
        n_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
        # beginning of the batch, given index
        batch_begin = index * batch_size
        # end of the batch, given index
        batch_end = batch_begin + batch_size

        pretrain_fns = []
        for dA in self.dA_layers:  # iterate over the dA layers
            # build the cost and updates list
            cost, updates = dA.get_cost_updates(corruption_level,
                                                learning_rate)
            # compile the theano function
            fn = theano.function(inputs=[index,
                                         theano.Param(corruption_level, default=0.2),
                                         theano.Param(learning_rate, default=0.1)],
                                 outputs=cost,
                                 updates=updates,
                                 givens={self.x: train_set_x[batch_begin:
                                                             batch_end]})
            # append fn to the list of functions
            pretrain_fns.append(fn)

        return pretrain_fns