Example #1
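This get_updates override temporarily monkey-patches K.update so that every update the parent optimizer computes for a trainable parameter is rescaled by the current value of the piecewise-linear schedule; the patch is undone before returning, so other code using K.update is unaffected.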
def get_updates(self, loss, params):
    # Multiplier given by the schedule at the current training step
    lr_multiplier = piecewise_linear(self.iterations, self.lr_schedule)

    old_update = K.update

    def new_update(x, new_x):
        # Rescale the step only for the trainable parameters
        if is_one_of(x, params):
            new_x = x + (new_x - x) * lr_multiplier
        return old_update(x, new_x)

    # Patch K.update while the parent optimizer builds its updates,
    # then restore it
    K.update = new_update
    updates = super(new_optimizer, self).get_updates(loss, params)
    K.update = old_update

    return updates
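Every example on this page calls a piecewise_linear(step, schedule) helper that the listing does not show. Below is a minimal NumPy sketch of the semantics the snippets appear to assume: a dict mapping step numbers to multipliers, an implicit (0, 0) starting point that yields a linear warmup, and a constant value past the last key. This hypothetical version works on plain Python numbers, whereas the real helper must operate on backend tensors such as self.iterations.

import numpy as np

def piecewise_linear(step, schedule):
    # Sketch only: interpolate the multiplier at `step` from a
    # {step: value} dict, warming up from (0, 0.0) and holding the
    # last value once the final key is passed.
    points = sorted(schedule.items())
    if points[0][0] != 0:
        points = [(0, 0.0)] + points
    xs, ys = zip(*points)
    return float(np.interp(step, xs, ys))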
Example #2
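A lighter variant for tf.keras's OptimizerV2: _decayed_lr is the hook the base class uses to compute the current learning rate, so multiplying its result by the schedule value applies the schedule without touching the update rule itself.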
def _decayed_lr(self, var_dtype):
    lr_multiplier = piecewise_linear(self.iterations, self.lr_schedule)
    lr_t = super(new_optimizer, self)._decayed_lr(var_dtype)
    # Apply the schedule on top of whatever decay the base class performs
    return lr_t * K.cast(lr_multiplier, var_dtype)
Example #3
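The constructor that accompanies the new_optimizer overrides above: it normalizes the schedule keys to integer step numbers and builds the multiplier from the optimizer's iteration counter.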
def __init__(self, lr_schedule, *args, **kwargs):
    super(new_optimizer, self).__init__(*args, **kwargs)
    # Normalize schedule keys to integer step numbers
    self.lr_schedule = {int(i): j for i, j in lr_schedule.items()}
    self.lr_multiplier = piecewise_linear(self.iterations, self.lr_schedule)
Example #4
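A wrapper-style alternative: instead of subclassing the optimizer, PiecewiseLinearLearningRate wraps an existing one and rescales its learning_rate by the schedule factor.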
def __init__(self, optimizer, schedule=None, **kwargs):
    super(PiecewiseLinearLearningRate, self).__init__(optimizer, **kwargs)
    self.schedule = {int(i): j for i, j in schedule.items()}
    # Rescale the wrapped optimizer's learning rate by the schedule factor
    factor = piecewise_linear(self.iterations, self.schedule)
    self.learning_rate = self.learning_rate * factor
Example #5
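The same idea expressed as a tf.keras LearningRateSchedule: __call__ receives the current step and returns the schedule's value for it.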
def __call__(self, step):
    # Evaluate the schedule at the given step, inside a TF name scope
    with ops.name_scope_v2(self.name or "PiecewiseLinear") as name:
        return piecewise_linear(step, self.schedule)
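To make the schedule format concrete, here is what the NumPy sketch above returns for a typical warmup-then-decay schedule (the numbers are chosen purely for illustration):

piecewise_linear(0,    {1000: 1.0, 2000: 0.1})  # 0.0   start of the linear warmup
piecewise_linear(500,  {1000: 1.0, 2000: 0.1})  # 0.5   halfway through warmup
piecewise_linear(1000, {1000: 1.0, 2000: 0.1})  # 1.0   warmup finished
piecewise_linear(1500, {1000: 1.0, 2000: 0.1})  # 0.55  decaying toward 0.1
piecewise_linear(3000, {1000: 1.0, 2000: 0.1})  # 0.1   held constant thereafter

In the subclass examples (#1–#3) this multiplier scales the parameter updates or the decayed learning rate; in #4 and #5 it scales the learning rate directly.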