Example #1
 def __init__(
     self,
     learning_rate,
     use_locking=False,
     name='GradientDescent',
 ):
     super(GradientDescentOptimizer, self).__init__(use_locking, name)
     # momentum fixed at 0. => plain gradient descent
     self.updater = _updaters.SGDUpdater(learning_rate, 0.)
     self._set_dynamic_lr(learning_rate)
Example #2
 def __init__(self,
              learning_rate,
              momentum,
              use_locking=False,
              name='Momentum',
              use_nesterov=False):
     super(MomentumOptimizer, self).__init__(use_locking, name)
     if not use_nesterov:
         # classical momentum update
         self.updater = updaters.SGDUpdater(learning_rate, momentum)
     else:
         # Nesterov accelerated gradient update
         self.updater = updaters.NesterovUpdater(learning_rate, momentum)
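
Examples #1 and #2 differ only in the momentum argument: plain gradient descent is SGDUpdater(learning_rate, 0.), and setting use_nesterov swaps in NesterovUpdater. For reference, here is a minimal NumPy sketch of the conventional SGD-with-momentum rule such an updater is generally expected to apply; the exact update implemented by SGDUpdater is an assumption, and the helper below is purely illustrative.

 import numpy as np

 def sgd_momentum_step(param, grad, velocity, learning_rate, momentum):
     # velocity keeps a decaying sum of past gradients;
     # momentum=0.0 reduces this to plain gradient descent,
     # matching the 0. passed in Examples #1, #5 and #7
     velocity = momentum * velocity - learning_rate * grad
     return param + velocity, velocity

 w, v = np.zeros(3), np.zeros(3)
 g = np.array([0.5, -1.0, 2.0])
 w, v = sgd_momentum_step(w, g, v, learning_rate=0.1, momentum=0.9)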
Example #3
    def __init__(self, prototxt):
        super(SGDSolver, self).__init__(prototxt=prototxt)
        self._updater = updaters.SGDUpdater(**self._update_param)

        # generate update targets: every blob with a positive lr multiplier
        for layer, blobs in self._net.params.items():
            self._lr_blobs.extend(blobs)
        for idx, blob in enumerate(self._lr_blobs):
            if self._net._lr_mults[idx] > 0:
                if blob.diff is None:
                    continue
                self._updater.append((blob.data, blob.diff),
                                     self._net._lr_mults[idx],
                                     self._net._decay_mults[idx])
        self.train = self._net.function
        self.tests = [test_net.function for test_net in self._test_nets]
        self.update = function(updater=self._updater)
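
Example #3 shows the full wiring: the solver builds an SGDUpdater from the parsed solver parameters, registers every blob with a positive learning-rate multiplier as a (data, diff) pair, and compiles the result into self.update. A hypothetical training loop on top of such a solver might look as follows; the call signatures of solver.train and solver.update are assumptions (the example only shows how those functions are constructed), and 'solver.prototxt' is a placeholder path.

 solver = SGDSolver('solver.prototxt')  # placeholder prototxt path
 for step in range(10000):
     solver.train()   # forward/backward pass, presumably filling each blob.diff
     solver.update()  # apply the SGDUpdater to each registered (data, diff) pair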
Example #4
 def __init__(self, prototxt):
     super(SGDSolver, self).__init__(prototxt=prototxt)
     self._optimizer = updaters.SGDUpdater(**self._update_param)
     self.BuildOptimizer()
Example #5
 def __init__(self,
              learning_rate,
              use_locking=False,
              name='GradientDescent'):
     super(GradientDescentOptimizer, self).__init__(use_locking, name)
     self.updater = updaters.SGDUpdater(learning_rate, 0.0)
Example #6
 def __init__(self, learning_rate, momentum):
     super(MomentumOptimizer, self).__init__()
     self.updater = updaters.SGDUpdater(learning_rate, momentum)
Example #7
 def __init__(self, learning_rate):
     super(GradientDescentOptimizer, self).__init__()
     self.updater = updaters.SGDUpdater(learning_rate, 0.0)
Example #8
 def __init__(self, proto_txt):
     super(SGDSolver, self).__init__(proto_txt=proto_txt)
     self.optimizer = _updaters.SGDUpdater(**self._optimizer_arguments)
     self.BuildOptimizer()