def __init__(self, batch_size=600, learning_rate=0.13, L1_lambda=0.00, L2_lambda=0.0000, n_epochs=1000):
    '''
    Constructor
    batch_size=600, learning_rate=0.13, L1_lambda=0.00, L2_lambda=0.0000,
        n_epochs=1000
    '''
    trainer.__init__(self)
    self.batch_size = batch_size
    self.L1_lambda = L1_lambda
    self.L2_lambda = L2_lambda
    self.learning_rate = learning_rate
    self.n_epochs = n_epochs
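# A minimal sketch, not shown in the snippet above: how these stored
# hyperparameters typically enter one L1/L2-regularized SGD update.
# `sgd_step` is a hypothetical helper introduced here for illustration.
import numpy as np

def sgd_step(W, grad, learning_rate=0.13, L1_lambda=0.0, L2_lambda=0.0):
    # Subgradient of L1_lambda * ||W||_1 plus gradient of L2_lambda * ||W||_2^2.
    penalty = L1_lambda * np.sign(W) + 2.0 * L2_lambda * W
    return W - learning_rate * (grad + penalty)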
def __init__(self, batch_size=600, learning_rate=0.001, L1_lambda=0.00, L2_lambda=0.0000, n_epochs=1000, decay=0.9, momentum=0.0):
    '''
    Constructor
    batch_size=600, learning_rate=0.001, L1_lambda=0.00, L2_lambda=0.0000,
        n_epochs=1000, decay=0.9, momentum=0.0
    '''
    trainer.__init__(self)
    self.batch_size = batch_size
    self.L1_lambda = L1_lambda
    self.L2_lambda = L2_lambda
    self.learning_rate = learning_rate
    self.n_epochs = n_epochs
    self.decay = decay
    self.momentum = momentum
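# Sketch under an assumption: the snippet does not show how `decay` is used,
# but learning_rate=0.001 with decay=0.9 matches RMSprop's usual defaults,
# so this illustrates an RMSprop update with optional momentum.
import numpy as np

def rmsprop_step(W, grad, cache, velocity, learning_rate=0.001, decay=0.9,
                 momentum=0.0, eps=1e-8):
    # Running average of squared gradients scales the step per weight.
    cache = decay * cache + (1.0 - decay) * grad ** 2
    velocity = momentum * velocity - learning_rate * grad / (np.sqrt(cache) + eps)
    return W + velocity, cache, velocity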
def __init__(self, learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000, batch_size=20):
    '''
    Constructor
    learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
        batch_size=20
    '''
    trainer.__init__(self)
    self.batch_size = batch_size
    self.L1_lambda = L1_reg
    self.L2_lambda = L2_reg
    self.learning_rate = learning_rate
    self.n_epochs = n_epochs
    # Relative-improvement threshold used by the early-stopping check.
    self.early_stopping_threshold = 0.995
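# Sketch, an assumption rather than code from this trainer: the 0.995 default
# echoes the relative-improvement early-stopping rule from the Theano MLP
# tutorial, where training continues only while the validation loss still
# drops by at least 0.5% relative to the best loss seen so far.
def improved_significantly(val_loss, best_loss, threshold=0.995):
    return val_loss < best_loss * threshold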
def __init__(self, module, dataset=None, learningrate=0.01, lrdecay=1.0,
             momentum=0., verbose=False, batchlearning=False,
             weightdecay=0.):
    '''
    Constructor
    Builds a backprop-style trainer for `module`, optionally bound to
    `dataset`.
    '''
    trainer.__init__(self, module)
    self.verbose = verbose
    self.batchlearning = batchlearning
    self.weightdecay = weightdecay
    self.epoch = 0
    self.totalepochs = 0
    # Delegate the update rule to a gradient-descent helper.
    self.descent = GradientDescent()
    self.descent.alpha = learningrate
    self.descent.momentum = momentum
    self.descent.alphadecay = lrdecay
    self.descent.init(module.weights)
    # Bind the training data; without this, `dataset` would be accepted but
    # never used (assumption: the base `trainer` provides setData, as
    # PyBrain's Trainer does).
    self.ds = None
    self.setData(dataset)
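# Usage sketch, assuming this constructor mirrors PyBrain's BackpropTrainer
# (the class name itself is not shown above). Trains a tiny network on XOR.
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)  # 2 inputs, 1 target
for x, y in [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]:
    ds.addSample(x, (y,))

net = buildNetwork(2, 3, 1)  # 2-3-1 feedforward network
t = BackpropTrainer(net, dataset=ds, learningrate=0.01, momentum=0.9,
                    weightdecay=0.0, verbose=False)
t.trainEpochs(10)  # run ten epochs of backprop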