Example #1
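This and the later snippets assume the usual test-module imports; a minimal header, hedged, since the exact module paths differ between the repos these examples come from:

import tensorflow as tf
from google.protobuf import text_format
# The following paths match the TF Object Detection API layout and are an
# assumption for the other repos:
from object_detection.protos import optimizer_pb2
from object_detection.builders import optimizer_builder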
    def test_create_optimizer(self):
        options_str = "adagrad{}"
        options = text_format.Merge(options_str, optimizer_pb2.Optimizer())
        opt = optimization.create_optimizer(options)
        self.assertIsInstance(opt, tf.compat.v1.train.AdagradOptimizer)

        options_str = "rmsprop{}"
        options = text_format.Merge(options_str, optimizer_pb2.Optimizer())
        opt = optimization.create_optimizer(options)
        self.assertIsInstance(opt, tf.compat.v1.train.RMSPropOptimizer)

        options_str = "adam{}"
        options = text_format.Merge(options_str, optimizer_pb2.Optimizer())
        opt = optimization.create_optimizer(options)
        self.assertIsInstance(opt, tf.compat.v1.train.AdamOptimizer)
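A minimal sketch of how such a dispatcher could work, assuming the Optimizer proto keeps the three choices in a oneof named "optimizer" (the oneof name, the helper name, and the fixed learning rate are assumptions, not the real optimization.create_optimizer):

import tensorflow as tf

_OPTIMIZERS = {
    "adagrad": tf.compat.v1.train.AdagradOptimizer,
    "rmsprop": tf.compat.v1.train.RMSPropOptimizer,
    "adam": tf.compat.v1.train.AdamOptimizer,
}

def create_optimizer_sketch(options, learning_rate=0.001):
    # WhichOneof returns the name of the field that the text proto set,
    # e.g. "adam" for "adam{}".
    kind = options.WhichOneof("optimizer")
    if kind not in _OPTIMIZERS:
        raise ValueError("Unsupported optimizer: %r" % kind)
    return _OPTIMIZERS[kind](learning_rate)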
Example #2
 def testBuildEmptyOptimizer(self):
   optimizer_text_proto = """
   """
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   with self.assertRaises(ValueError):
     optimizer_builder.build(optimizer_proto)
Example #3
 def testBuildEmptyOptimizer(self):
   optimizer_text_proto = """
   """
   global_summaries = set([])
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   with self.assertRaises(ValueError):
     optimizer_builder.build(optimizer_proto, global_summaries)
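Both variants pass for the same reason: merging an empty string sets no field, so the builder has nothing to dispatch on. A hedged sketch of the check (the oneof name is again an assumption):

def build_sketch(optimizer_config):
    # An empty text proto leaves the oneof unset, so WhichOneof
    # returns None and the config is rejected.
    optimizer_type = optimizer_config.WhichOneof("optimizer")
    if optimizer_type is None:
        raise ValueError("Optimizer %s not supported." % optimizer_type)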
Example #4
    def test_build_optimizer(self):
        # Gradient descent optimizer.

        options_str = r"""
      sgd {
      }
    """
        options = optimizer_pb2.Optimizer()
        text_format.Merge(options_str, options)
        opt = training_utils.build_optimizer(options)
        self.assertIsInstance(opt, tf.train.GradientDescentOptimizer)

        # Adagrad optimizer.

        options_str = r"""
      adagrad {
      }
    """
        options = optimizer_pb2.Optimizer()
        text_format.Merge(options_str, options)
        opt = training_utils.build_optimizer(options)
        self.assertIsInstance(opt, tf.train.AdagradOptimizer)

        # Adam optimizer.

        options_str = r"""
      adam {
      }
    """
        options = optimizer_pb2.Optimizer()
        text_format.Merge(options_str, options)
        opt = training_utils.build_optimizer(options)
        self.assertIsInstance(opt, tf.train.AdamOptimizer)

        # RMSProp optimizer.

        options_str = r"""
      rmsprop {
      }
    """
        options = optimizer_pb2.Optimizer()
        text_format.Merge(options_str, options)
        opt = training_utils.build_optimizer(options)
        self.assertIsInstance(opt, tf.train.RMSPropOptimizer)
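Example #4 covers the same optimizers as Example #1 through a different entry point, training_utils.build_optimizer, adds the plain gradient-descent case (sgd), and asserts against the TF1 tf.train classes directly rather than their tf.compat.v1 aliases.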
Example #5
 def testBuildAdamOptimizer(self):
   optimizer_text_proto = """
     adam_optimizer: {
       learning_rate: {
         constant_learning_rate {
           learning_rate: 0.002
         }
       }
     }
     use_moving_average: false
   """
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   optimizer, _ = optimizer_builder.build(optimizer_proto)
   self.assertIsInstance(optimizer, tf.train.AdamOptimizer)
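A hedged sketch of the mapping this test implies, reading the nested constant_learning_rate field straight off the config (the real optimizer_builder.build also returns a second value, unpacked as _ above):

def build_adam_sketch(config):
    # The field path mirrors the text proto above.
    lr = config.adam_optimizer.learning_rate.constant_learning_rate.learning_rate
    return tf.train.AdamOptimizer(learning_rate=lr)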
Example #6
 def testBuildMomentumOptimizer(self):
   optimizer_text_proto = """
     momentum_optimizer: {
       learning_rate: {
         constant_learning_rate {
           learning_rate: 0.001
         }
       }
       momentum_optimizer_value: 0.99
     }
     use_moving_average: false
   """
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   optimizer, _ = optimizer_builder.build(optimizer_proto)
   self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
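The momentum case presumably maps momentum_optimizer_value onto the momentum argument; a one-line sketch under that assumption:

optimizer = tf.train.MomentumOptimizer(learning_rate=0.001, momentum=0.99)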
Example #7
 def testBuildMovingAverageOptimizer(self):
   optimizer_text_proto = """
     adam_optimizer: {
       learning_rate: {
         constant_learning_rate {
           learning_rate: 0.002
         }
       }
     }
     use_moving_average: True
   """
   global_summaries = set([])
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
   self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
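Note the signature drift between this test and its siblings: the older two-argument build(optimizer_proto, global_summaries) returns the optimizer directly, while the variant used in Examples #5, #6, #8 and #9 returns a pair that the tests unpack as optimizer, _.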
Example #8
 def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
   optimizer_text_proto = """
     adam_optimizer: {
       learning_rate: {
         constant_learning_rate {
           learning_rate: 0.002
         }
       }
     }
     use_moving_average: True
     moving_average_decay: 0.2
   """
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   optimizer, _ = optimizer_builder.build(optimizer_proto)
   self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
   # TODO: Find a way to not depend on the private members.
   self.assertAlmostEqual(optimizer._ema._decay, 0.2)
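A hedged sketch of the wrapping step the test implies; tf.contrib.opt.MovingAverageOptimizer and its average_decay argument are real TF1 APIs, while the surrounding control flow is an assumption:

optimizer = tf.train.AdamOptimizer(learning_rate=0.002)
if optimizer_proto.use_moving_average:
    # _ema in the assertion above is the wrapper's internal
    # tf.train.ExponentialMovingAverage, hence the decay check.
    optimizer = tf.contrib.opt.MovingAverageOptimizer(
        optimizer, average_decay=optimizer_proto.moving_average_decay)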
Example #9
 def testBuildRMSPropOptimizer(self):
   optimizer_text_proto = """
     rms_prop_optimizer: {
       learning_rate: {
         exponential_decay_learning_rate {
           initial_learning_rate: 0.004
           decay_steps: 800720
           decay_factor: 0.95
         }
       }
       momentum_optimizer_value: 0.9
       decay: 0.9
       epsilon: 1.0
     }
     use_moving_average: false
   """
   optimizer_proto = optimizer_pb2.Optimizer()
   text_format.Merge(optimizer_text_proto, optimizer_proto)
   optimizer, _ = optimizer_builder.build(optimizer_proto)
   self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
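The schedule in this config is the stock TF1 exponential decay; a hedged sketch of how the fields could be wired together (the wiring is the assumption, the APIs are real):

global_step = tf.train.get_or_create_global_step()
learning_rate = tf.train.exponential_decay(
    learning_rate=0.004,   # initial_learning_rate
    global_step=global_step,
    decay_steps=800720,
    decay_rate=0.95)       # decay_factor
optimizer = tf.train.RMSPropOptimizer(
    learning_rate,
    decay=0.9,
    momentum=0.9,          # momentum_optimizer_value
    epsilon=1.0)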