Example #1
0
 def get(self, name=None):
     """Build and return a new optimizer instance for ``self.opt_name``.

     Args:
         name: Optional name forwarded to the optimizer constructor.

     Returns:
         An ``AdamOptimizer`` or ``AdadeltaOptimizer`` constructed with
         ``self.params`` (if any) as keyword arguments.

     Raises:
         NotImplementedError: If ``self.opt_name`` is not a supported
             optimizer name.
     """
     # Treat a missing params dict as "no keyword overrides".
     params = {} if self.params is None else self.params
     if name is not None:
         # Merge the explicit name without mutating self.params.
         params = dict(params, name=name)
     if self.opt_name == "Adam":
         return AdamOptimizer(**params)
     elif self.opt_name == "Adadelta":
         return AdadeltaOptimizer(**params)
     else:
         # BUG FIX: `NotImplemented` is a sentinel value, not an exception
         # class — calling it raises TypeError. Raise NotImplementedError.
         raise NotImplementedError(self.opt_name)
Example #2
0
File: trainer.py  Project: sjliu0920/MUPPET
    def get(self, name=None, lr_decay=None, global_step=None):
        """Return a configured optimizer, wiring up a learning-rate variable.

        A non-trainable ``lr`` variable is created under the ``'opt'`` scope.
        When ``lr_decay`` is given, the configured learning rate is decayed
        and ``self.lr_op`` assigns the decayed value to the variable;
        otherwise ``self.lr_op`` is the variable itself. The optimizer is
        then built with ``self.lr_op`` as its ``learning_rate``.

        Args:
            name: Optional name forwarded to the optimizer constructor.
            lr_decay: Optional decay callable accepting ``learning_rate``,
                ``global_step`` and ``name`` keyword arguments
                (e.g. a ``tf.train`` decay schedule). TODO confirm signature
                against callers.
            global_step: Step tensor passed through to ``lr_decay``.

        Returns:
            The optimizer instance matching ``self.opt_name``.

        Raises:
            NotImplementedError: If ``self.opt_name`` is not one of
                "Adam", "Adadelta", "RMSprop", or "Momentum".
        """
        # Copy so rewriting params['learning_rate'] never mutates self.params.
        params = {} if self.params is None else self.params.copy()
        with tf.variable_scope('opt'):
            lr_tensor = tf.get_variable('lr',
                                        dtype=tf.float32,
                                        initializer=tf.constant(
                                            params['learning_rate']),
                                        trainable=False)
            if lr_decay is not None:
                params['learning_rate'] = lr_decay(
                    learning_rate=params['learning_rate'],
                    global_step=global_step,
                    name='lr_decay')

            # Static LR: read the variable. Decayed LR: assign the decayed
            # value so the tracked variable follows the schedule.
            self.lr_op = lr_tensor if lr_decay is None else lr_tensor.assign(
                params['learning_rate'])
            params['learning_rate'] = self.lr_op

        # Dispatch table replaces the duplicated if/elif branches.
        optimizers = {
            "Adam": AdamOptimizer,
            "Adadelta": AdadeltaOptimizer,
            "RMSprop": RMSPropOptimizer,
            "Momentum": MomentumOptimizer,
        }
        try:
            opt_cls = optimizers[self.opt_name]
        except KeyError:
            # BUG FIX: `NotImplemented` is a sentinel value, not an exception
            # class — calling it raises TypeError. Raise NotImplementedError.
            raise NotImplementedError(self.opt_name) from None
        if name is None:
            return opt_cls(**params)
        return opt_cls(name=name, **params)