Example #1
        def get_updates(self, loss, params):
            # look up the lr multiplier for the current step
            lr_rate_t = piecewise_linear(self.iterations, self.lr_schedule)

            old_update = K.update

            def new_update(x, new_x):
                new_x = x + (new_x - x) * lr_rate_t  # scale the update by the current lr multiplier
                return old_update(x, new_x)

            K.update = new_update
            updates = super(NewOptimizer, self).get_updates(loss, params)
            K.update = old_update
            return updates
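
This `get_updates` override computes a schedule multiplier for the current step and then temporarily monkey-patches `K.update`, so every parameter assignment made by the parent optimizer is scaled by that multiplier before the patch is restored. It relies on a `piecewise_linear(iterations, schedule)` helper that is not shown here. The sketch below is one plausible implementation, assuming the schedule is a `{step: value}` dict (e.g. `{1000: 1.0, 2000: 0.1}`: ramp from 0 to 1.0 over the first 1000 steps, decay to 0.1 by step 2000, then hold); it is not necessarily the source's exact helper.

import tensorflow.keras.backend as K

def piecewise_linear(t, schedule):
    """Piecewise-linear multiplier at step t, given a {step: value} dict."""
    schedule = sorted(schedule.items())
    if schedule[0][0] != 0:
        schedule = [(0, 0.0)] + schedule  # implicit starting point at (0, 0)

    t = K.cast(t, K.floatx())
    x = K.constant(schedule[0][1], dtype=K.floatx())
    for i in range(len(schedule)):
        t_begin = schedule[i][0]
        x_begin = x
        if i != len(schedule) - 1:
            # linear segment from breakpoint i towards breakpoint i + 1
            dx = schedule[i + 1][1] - schedule[i][1]
            dt = schedule[i + 1][0] - schedule[i][0]
            x = schedule[i][1] + (t - t_begin) * dx / dt
        else:
            # hold the final value after the last breakpoint
            x = K.constant(schedule[i][1], dtype=K.floatx())
        x = K.switch(t >= t_begin, x, x_begin)
    return x
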
Example #2
        def _decayed_lr(self, var_dtypes):
            """Override the hook that returns the decayed learning rate."""

            lr_t = super(NewOptimizer, self)._decayed_lr(var_dtypes)
            lr_rate = piecewise_linear(self.iterations, self.lr_schedule)
            return lr_t * K.cast(lr_rate, var_dtypes)
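
Example #2 targets tf.keras's OptimizerV2, whose internal `_decayed_lr(var_dtype)` hook returns the learning rate used for every variable update; multiplying its result by the schedule factor applies the same piecewise-linear warm-up/decay without touching `K.update`. Below is a hedged sketch of how such an override might be packaged and used: the factory name `extend_with_lr_schedule` and the `lr_schedule` constructor argument are illustrative, `piecewise_linear` is the helper sketched after Example #1, and `_decayed_lr` only exists on OptimizerV2-based optimizers (tf.keras.optimizers.Adam in older TF 2 releases, tf.keras.optimizers.legacy.Adam in newer ones).

import tensorflow as tf
import tensorflow.keras.backend as K

def extend_with_lr_schedule(BaseOptimizer):
    """Return a BaseOptimizer subclass whose lr follows a piecewise-linear schedule."""

    class NewOptimizer(BaseOptimizer):
        def __init__(self, lr_schedule, *args, **kwargs):
            super(NewOptimizer, self).__init__(*args, **kwargs)
            # {step: multiplier}, e.g. {1000: 1.0, 10000: 0.1}
            self.lr_schedule = {int(k): v for k, v in lr_schedule.items()}

        def _decayed_lr(self, var_dtype):
            # scale the parent's decayed lr by the scheduled multiplier
            lr_t = super(NewOptimizer, self)._decayed_lr(var_dtype)
            lr_rate = piecewise_linear(self.iterations, self.lr_schedule)
            return lr_t * K.cast(lr_rate, var_dtype)

    return NewOptimizer

# Usage (illustrative): warm up over 1,000 steps, then decay to 10% of the base lr by step 10,000.
ScheduledAdam = extend_with_lr_schedule(tf.keras.optimizers.Adam)
optimizer = ScheduledAdam(lr_schedule={1000: 1.0, 10000: 0.1}, learning_rate=1e-3)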