Example 1
 def test_linearCosine_decay_tf(self):
     """test creating an optimizer with a linear cosine decay to the learning rate"""
     rate = optimizers.LinearCosineDecay(initial_rate=0.1,
                                         decay_steps=10000)
     opt = optimizers.Adam(learning_rate=rate)
     global_step = tf.Variable(0)
     tfopt = opt._create_tf_optimizer(global_step)
Example 2
 def test_exponential_decay_tf(self):
   """Test creating an optimizer with an exponentially decaying learning rate."""
   rate = optimizers.ExponentialDecay(
       initial_rate=0.001, decay_rate=0.99, decay_steps=10000)
   opt = optimizers.Adam(learning_rate=rate)
   global_step = tf.Variable(0)
   tfopt = opt._create_tf_optimizer(global_step)
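
Examples 1 and 2 build learning rate schedules and wrap them in an Adam optimizer via `_create_tf_optimizer`, which (as Example 11 asserts) yields a `tf.keras.optimizers.Adam`. As a point of reference, here is a minimal, self-contained sketch of one training step with plain Keras objects; the `tf.keras.optimizers.schedules.ExponentialDecay` below is assumed to be equivalent to the schedule created in Example 2, and the toy loss is illustrative only.

    import tensorflow as tf

    # Keras schedule equivalent to Example 2: 0.001 * 0.99 ** (step / 10000)
    rate = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate=0.001, decay_steps=10000, decay_rate=0.99)
    tfopt = tf.keras.optimizers.Adam(learning_rate=rate)

    w = tf.Variable([1.0])
    with tf.GradientTape() as tape:
        loss = tf.reduce_sum(w * w)          # toy quadratic loss
    grads = tape.gradient(loss, [w])
    tfopt.apply_gradients(zip(grads, [w]))   # one optimization step
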
Example 3
 def test_adam(self):
     """Test creating an Adam optimizer."""
     opt = optimizers.Adam(learning_rate=0.01)
     with self.session() as sess:
         global_step = tf.Variable(0)
         tfopt = opt._create_optimizer(global_step)
         assert isinstance(tfopt, tf.train.AdamOptimizer)
Example 4
 def test_linearCosine_decay_pytorch(self):
   """test creating an optimizer with a linear cosine decay to the learning rate"""
   rate = optimizers.LinearCosineDecay(initial_rate=0.1, decay_steps=10000)
   opt = optimizers.Adam(learning_rate=rate)
   params = [torch.nn.Parameter(torch.Tensor([1.0]))]
   torchopt = opt._create_pytorch_optimizer(params)
   schedule = rate._create_pytorch_schedule(torchopt)
Example 5
 def test_exponential_decay_pytorch(self):
   """Test creating an optimizer with an exponentially decaying learning rate."""
   rate = optimizers.ExponentialDecay(
       initial_rate=0.001, decay_rate=0.99, decay_steps=10000)
   opt = optimizers.Adam(learning_rate=rate)
   params = [torch.nn.Parameter(torch.Tensor([1.0]))]
   torchopt = opt._create_pytorch_optimizer(params)
   schedule = rate._create_pytorch_schedule(torchopt)
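
Examples 4 and 5 create a `torch.optim` optimizer plus a learning rate schedule for it. The tests only construct the objects; the sketch below shows how such a pair is typically driven, using plain PyTorch stand-ins (a `torch.optim.Adam` and an `ExponentialLR` scheduler) for whatever `_create_pytorch_optimizer` and `_create_pytorch_schedule` actually return, and a toy quadratic loss.

    import torch

    params = [torch.nn.Parameter(torch.Tensor([1.0]))]
    # Stand-ins for the wrapper-created objects; ExponentialLR decays per step,
    # so the decay_steps granularity of Example 5 is not reproduced here.
    torchopt = torch.optim.Adam(params, lr=0.001)
    schedule = torch.optim.lr_scheduler.ExponentialLR(torchopt, gamma=0.99)

    for step in range(3):
        torchopt.zero_grad()
        loss = (params[0] ** 2).sum()   # toy quadratic loss
        loss.backward()
        torchopt.step()                 # update the parameter
        schedule.step()                 # advance the learning rate schedule
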
Example 6
 def test_linearCosine_decay_jax(self):
     """test creating an optimizer with a linear cosine decay to the learning rate"""
     import optax
     rate = optimizers.LinearCosineDecay(initial_rate=0.1,
                                         decay_steps=10000)
     opt = optimizers.Adam(learning_rate=rate)
     jaxopt = opt._create_jax_optimizer()
     assert isinstance(jaxopt, optax.GradientTransformation)
Example 7
 def test_polynomial_decay(self):
     """Test creating an optimizer with a polynomially decaying learning rate."""
     rate = optimizers.PolynomialDecay(initial_rate=0.001,
                                       final_rate=0.0001,
                                       decay_steps=10000)
     opt = optimizers.Adam(learning_rate=rate)
     with self.session() as sess:
         global_step = tf.Variable(0)
         tfopt = opt._create_optimizer(global_step)
Example 8
 def test_polynomial_decay_jax(self):
     """Test creating an optimizer with a polynomially decaying learning rate."""
     import optax
     rate = optimizers.PolynomialDecay(initial_rate=0.001,
                                       final_rate=0.0001,
                                       decay_steps=10000)
     opt = optimizers.Adam(learning_rate=rate)
     jaxopt = opt._create_jax_optimizer()
     assert isinstance(jaxopt, optax.GradientTransformation)
Example 9
 def test_PieceWise_decay_jax(self):
     """test creating an optimizer with a PeiceWise constant decay to the learning rate"""
     import optax
     rate = optimizers.PiecewiseConstantSchedule(initial_rate=0.1,
                                                 boundaries_and_scales={
                                                     5000: 0.1,
                                                     10000: 0.1,
                                                     15000: 0.1
                                                 })
     opt = optimizers.Adam(learning_rate=rate)
     jaxopt = opt._create_jax_optimizer()
     assert isinstance(jaxopt, optax.GradientTransformation)
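
Example 9 maps directly onto `optax.piecewise_constant_schedule`, which takes an initial value and a dict of {boundary_step: scale_factor}. Below is a minimal sketch of building the equivalent schedule in plain optax and attaching it to Adam; assuming the wrapper does something similar internally.

    import optax

    # Learning rate starts at 0.1 and is multiplied by 0.1 at steps 5000, 10000, 15000.
    schedule = optax.piecewise_constant_schedule(
        init_value=0.1,
        boundaries_and_scales={5000: 0.1, 10000: 0.1, 15000: 0.1})
    jaxopt = optax.adam(learning_rate=schedule)   # an optax.GradientTransformation
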
Example 10
 def test_adam_pytorch(self):
     """Test creating an Adam optimizer."""
     opt = optimizers.Adam(learning_rate=0.01)
     params = [torch.nn.Parameter(torch.Tensor([1.0]))]
     torchopt = opt._create_pytorch_optimizer(params)
     assert isinstance(torchopt, torch.optim.Adam)
Example 11
 def test_adam_tf(self):
     """Test creating an Adam optimizer."""
     opt = optimizers.Adam(learning_rate=0.01)
     global_step = tf.Variable(0)
     tfopt = opt._create_tf_optimizer(global_step)
     assert isinstance(tfopt, tf.keras.optimizers.Adam)
Example 12
 def test_adam_jax(self):
     """Test creating an Adam optimizer."""
     import optax
     opt = optimizers.Adam(learning_rate=0.01)
     jaxopt = opt._create_jax_optimizer()
     assert isinstance(jaxopt, optax.GradientTransformation)
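
Example 12 only checks the type of the returned object. For completeness, here is a minimal sketch of how an `optax.GradientTransformation` is used once created; `optax.adam(0.01)` stands in for the object returned by `opt._create_jax_optimizer()`, and the parameters and loss are illustrative.

    import jax
    import jax.numpy as jnp
    import optax

    jaxopt = optax.adam(learning_rate=0.01)   # stand-in for the wrapper-created optimizer

    params = {'w': jnp.ones(3)}
    opt_state = jaxopt.init(params)           # initialize optimizer state

    def loss_fn(p):
        return jnp.sum(p['w'] ** 2)           # toy quadratic loss

    grads = jax.grad(loss_fn)(params)
    updates, opt_state = jaxopt.update(grads, opt_state, params)
    params = optax.apply_updates(params, updates)
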