# Example 1 (score: 0)
 def __init__(self,
              learning_rate,
              momentum,
              use_locking=False,
              name='Momentum',
              use_nesterov=False):
     """Create a momentum optimizer.

     Selects between a plain SGD-with-momentum updater and a Nesterov
     updater depending on ``use_nesterov``; both receive the same
     learning rate and momentum coefficient.
     """
     super(MomentumOptimizer, self).__init__(use_locking, name)
     # Nesterov momentum only swaps the updater implementation;
     # the construction arguments are identical either way.
     updater_cls = (updaters.NesterovUpdater if use_nesterov
                    else updaters.SGDUpdater)
     self.updater = updater_cls(learning_rate, momentum)
# Example 2 (score: 0)
    def __init__(self, prototxt):
        """Build a Nesterov solver from a prototxt definition.

        Gathers every learnable blob of the underlying net as an update
        target (skipping blobs with a non-positive learning-rate
        multiplier or no gradient), then wires up the train / test /
        update entry points.
        """
        super(NesterovSolver, self).__init__(prototxt=prototxt)
        self._updater = updaters.NesterovUpdater(**self._update_param)

        # Flatten all layer parameters into the learnable-blob list.
        for _, blobs in self._net.params.items():
            self._lr_blobs.extend(blobs)
        for i, blob in enumerate(self._lr_blobs):
            lr_mult = self._net._lr_mults[i]
            # Frozen blobs (lr_mult <= 0) and blobs without a gradient
            # buffer take no part in the update.
            if lr_mult <= 0 or blob.diff is None:
                continue
            self._updater.append((blob.data, blob.diff),
                                 lr_mult, self._net._decay_mults[i])
        self.train = self._net.function
        self.tests = [net.function for net in self._test_nets]
        self.update = function(updater=self._updater)
# Example 3 (score: 0)
 def __init__(self, prototxt):
     """Initialize the solver and attach a Nesterov optimizer.

     The update parameters are taken from the parsed prototxt held on
     the base class.
     """
     super(NesterovSolver, self).__init__(prototxt=prototxt)
     nesterov = updaters.NesterovUpdater(**self._update_param)
     self._optimizer = nesterov
     self.BuildOptimizer()
# Example 4 (score: 0)
 def __init__(self, proto_txt):
     """Set up the solver with a Nesterov update rule.

     Optimizer arguments are derived from *proto_txt* by the base-class
     constructor before the updater is instantiated.
     """
     super(NesterovSolver, self).__init__(proto_txt=proto_txt)
     # Instantiate the updater from the arguments the base class parsed.
     updater = _updaters.NesterovUpdater(**self._optimizer_arguments)
     self.optimizer = updater
     self.BuildOptimizer()