Example 1
 def __init__(self,
              module,
              etaminus=0.5,
              etaplus=1.2,
              deltamin=1.0e-6,
              deltamax=5.0,
              delta0=0.1,
              **kwargs):
     """ Set up training algorithm parameters, and objects associated with the trainer.
     
         :arg module: the module whose parameters should be trained. 
         :key etaminus: factor by which step width is decreased when overstepping (0.5)
         :key etaplus: factor by which step width is increased when following gradient (1.2)
         :key delta: current step width for each weight (internal state, not a constructor argument; initialized from delta0)
         :key deltamin: minimum step width (1e-6)
         :key deltamax: maximum step width (5.0)
         :key delta0: initial step width (0.1)           
     """
     BackpropTrainer.__init__(self, module, **kwargs)
     self.epoch = 0
     # set descender to RPROP mode and update parameters
     self.descent.rprop = True
     self.descent.etaplus = etaplus
     self.descent.etaminus = etaminus
     self.descent.deltamin = deltamin
     self.descent.deltamax = deltamax
     self.descent.deltanull = delta0
     self.descent.init(module.params)  # reinitialize, since mode changed
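
This constructor appears to be PyBrain's RPropMinusTrainer. A minimal usage sketch, assuming a working PyBrain install; the XOR dataset and the 2-3-1 network shape are illustrative assumptions, not part of the example:

    from pybrain.tools.shortcuts import buildNetwork
    from pybrain.datasets import SupervisedDataSet
    from pybrain.supervised.trainers import RPropMinusTrainer

    # Illustrative XOR task; any SupervisedDataSet works here.
    ds = SupervisedDataSet(2, 1)
    for inp, target in [((0, 0), (0,)), ((0, 1), (1,)),
                        ((1, 0), (1,)), ((1, 1), (0,))]:
        ds.addSample(inp, target)

    net = buildNetwork(2, 3, 1)
    trainer = RPropMinusTrainer(net, dataset=ds, etaminus=0.5, etaplus=1.2)
    for _ in range(100):
        trainer.train()  # one epoch of RPROP updates; returns the epoch error
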
Example 2
    def __init__(self, module, etaminus=0.5, etaplus=1.2, deltamin=1.0e-6,
                 deltamax=5.0, delta0=0.1, **kwargs):
        """Set up training algorithm parameters, and objects associated with
        the trainer.

        Args:
            module: the module whose parameters should be trained.
            etaminus: factor by which step width is decreased when overstepping
              (0.5)
            etaplus: factor by which step width is increased when following
              gradient (1.2)
            delta: current step width for each weight (internal state, not a
              constructor argument; initialized from delta0)
            deltamin: minimum step width (1e-6)
            deltamax: maximum step width (5.0)
            delta0: initial step width (0.1)
        """
        BackpropTrainer.__init__(self, module, **kwargs)
        self.epoch = 0
        # set descender to RPROP mode and update parameters
        self.descent.rprop = True
        self.descent.etaplus = etaplus
        self.descent.etaminus = etaminus
        self.descent.deltamin = deltamin
        self.descent.deltamax = deltamax
        self.descent.deltanull = delta0
        self.descent.init(module.params)  # reinitialize, since mode changed
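
The four eta/delta parameters control Rprop- style step-width adaptation: each weight keeps its own step width, grown by etaplus while its gradient keeps the same sign and shrunk by etaminus when the sign flips. A standalone sketch of that rule, as a hypothetical illustration rather than PyBrain's actual descent code:

    import numpy as np

    def rprop_step(grad, prev_grad, delta, etaminus=0.5, etaplus=1.2,
                   deltamin=1.0e-6, deltamax=5.0):
        """One Rprop- iteration: return (weight update, adapted step widths)."""
        sign_change = grad * prev_grad
        # Gradient kept its sign: follow it more aggressively, capped at deltamax.
        grown = np.minimum(delta * etaplus, deltamax)
        # Gradient flipped sign (minimum overstepped): back off, floored at deltamin.
        shrunk = np.maximum(delta * etaminus, deltamin)
        delta = np.where(sign_change > 0, grown,
                         np.where(sign_change < 0, shrunk, delta))
        # Step against the gradient; only the gradient's sign is used, not its size.
        return -np.sign(grad) * delta, delta
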
Example 3

 def __init__(self, module, dataset=None, learningrate=0.01, lrdecay=1.0,
              momentum=0., verbose=False, batchlearning=False, weightdecay=0.):
     # Pass every hyperparameter through to the BackpropTrainer base class unchanged.
     BackpropTrainer.__init__(self, module, dataset, learningrate, lrdecay,
                              momentum, verbose, batchlearning, weightdecay)