Example #1
    def get_optimizer(self, multitask):
        """Get the optimizer."""
        optconf = self.config['solver']['optimizer']
        method = optconf['name']
        learning_rate = self.get_learning_rate()
        if method == 'adadelta':
            opt = tf.train.AdadeltaOptimizer(learning_rate)
        elif method == 'adam':
            opt = tf.train.AdamOptimizer(learning_rate)
        elif method == 'adagrad':
            opt = tf.train.AdagradOptimizer(learning_rate)
        elif method == 'momentum':
            opt = tf.train.MomentumOptimizer(learning_rate, momentum=0.9)
        elif method == 'rmsprop':
            opt = tf.train.RMSPropOptimizer(learning_rate)
        elif method == 'gradientdecent':
            opt = tf.train.GradientDescentOptimizer(learning_rate)
        elif method == 'lazyadam':
            opt = tf.contrib.opt.LazyAdamOptimizer(learning_rate)
        elif method == 'weightedadam':
            weight_decay = self.config['solver']['optimizer']['weight_decay']
            opt = tf.contrib.opt.AdamWOptimizer(weight_decay=weight_decay,
                                                learning_rate=learning_rate)
        elif method == 'yellowfin':
            opt = optimizer.YFOptimizer(learning_rate)
        else:
            raise ValueError("Unsupported optimizer: {}".format(method))

        if multitask:
            opt = tf.contrib.opt.MultitaskOptimizerWrapper(opt)
            logging.info("Using multi-task optimizer")
        return opt
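Example #1 targets the TF 1.x API: it assumes `import tensorflow as tf`, the standard `logging` module, a YellowFin implementation bound to `optimizer`, and the `tf.contrib.opt` optimizers that were removed in TF 2.x. The optimizer choice is driven entirely by the config; as a minimal sketch (the exact schema beyond the keys read above is an assumption), the relevant fragment of `self.config` might look like:

# Hypothetical config fragment; get_optimizer only reads
# ['solver']['optimizer']['name'] (plus ['weight_decay'] for 'weightedadam').
config = {
    'solver': {
        'optimizer': {
            'name': 'weightedadam',
            'weight_decay': 1e-4,
        },
    },
}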
Example #2
  def get_optimizer(self):
    """Get the optimizer."""
    optconf = self.config['solver']['optimizer']
    method = optconf['name']
    learning_rate = self.get_learning_rate()
    if method == 'adadelta':
      opt = tf.train.AdadeltaOptimizer(learning_rate)
    elif method == 'adam':
      opt = tf.train.AdamOptimizer(learning_rate)
    elif method == 'adagrad':
      opt = tf.train.AdagradOptimizer(learning_rate)
    elif method == 'momentum':
      opt = tf.train.MomentumOptimizer(learning_rate, momentum=0.9)
    elif method == 'rmsprop':
      opt = tf.train.RMSPropOptimizer(learning_rate)
    elif method == 'gradientdecent':
      opt = tf.train.GradientDescentOptimizer(learning_rate)
    elif method == 'lazyadam':
      opt = tfa.optimizers.LazyAdam(learning_rate)
    elif method == 'weightedadam':
      weight_decay = self.config['solver']['optimizer']['weight_decay']
      opt = tfa.optimizers.AdamW(
          weight_decay=weight_decay, learning_rate=learning_rate)
    elif method == 'yellowfin':
      opt = optimizer.YFOptimizer(learning_rate)
    else:
      raise ValueError("Unsupported optimizer: {}".format(method))

    return opt
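Example #2 is the same factory moved toward TF 2.x: the `tf.contrib.opt` optimizers are replaced by their TensorFlow Addons equivalents (`tfa.optimizers.LazyAdam`, `tfa.optimizers.AdamW`, assuming `import tensorflow_addons as tfa`), and the multi-task wrapper is dropped. A minimal usage sketch, assuming a TF 1.x-style graph, a scalar `loss` tensor built elsewhere, and one of the `tf.train.*` branches above; `model` is a hypothetical stand-in for the instance that defines get_optimizer:

# Hypothetical training-op construction for the returned optimizer.
opt = model.get_optimizer()
global_step = tf.train.get_or_create_global_step()
train_op = opt.minimize(loss, global_step=global_step)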