Code example #1
0
File: sparse_optimizers.py  Project: tawawhite/rigl
 def get_drop_fraction(self, global_step, is_mask_update_iter_op):
     """Builds the drop-fraction op for the current training step.

     The fraction follows the configured annealing schedule
     ('constant', 'cosine', or 'exponential_<p>'); on iterations where
     `is_mask_update_iter_op` is false the op evaluates to zero.
     """
     anneal = self._drop_fraction_anneal
     initial = self._drop_fraction_initial_value
     if anneal == 'constant':
         drop_frac = initial
     elif anneal == 'cosine':
         # Anneal from the initial value down over [begin_step, end_step].
         drop_frac = learning_rate_decay.cosine_decay(
             initial,
             global_step,
             self._end_step - self._begin_step,
             name='cosine_drop_fraction')
     elif anneal.startswith('exponential'):
         # Schedule string carries the exponent, e.g. 'exponential_3'.
         exponent = extract_number(anneal)
         dtype = initial.dtype
         elapsed = math_ops.cast(global_step - self._begin_step, dtype)
         total = math_ops.cast(self._end_step - self._begin_step, dtype)
         remaining = 1 - math_ops.divide(elapsed, total)
         drop_frac = math_ops.multiply(
             initial,
             math_ops.pow(remaining, exponent),
             name='%s_drop_fraction' % anneal)
     else:
         raise ValueError('drop_fraction_anneal: %s is not valid' % anneal)
     # Gate on the mask-update predicate: zero on non-update iterations.
     return array_ops.where(is_mask_update_iter_op, drop_frac,
                            array_ops.zeros_like(drop_frac))
Code example #2
0
 def _nested_func(global_step):
     """Applies cosine decay at `global_step`.

     `learning_rate`, `decay_steps`, and `alpha` are captured from the
     enclosing scope.
     """
     schedule_kwargs = dict(
         learning_rate=learning_rate,
         global_step=global_step,
         decay_steps=decay_steps,
         alpha=alpha,
     )
     return learning_rate_decay.cosine_decay(**schedule_kwargs)
Code example #3
0
 def testDecay(self):
   """Checks cosine_decay against the NumPy reference across steps."""
   total_steps = 1000
   lr = 1.0
   # Sample steps past the decay horizon as well (step > total_steps).
   for global_step in range(0, 1500, 250):
     actual = learning_rate_decay.cosine_decay(lr, global_step, total_steps)
     reference = self.np_cosine_decay(global_step, total_steps)
     self.assertAllClose(self.evaluate(actual), reference, 1e-6)
Code example #4
0
 def testDecay(self):
     """Verifies decayed learning rates match the NumPy cosine reference."""
     steps_total = 1000
     base_lr = 1.0
     # Include steps beyond the decay horizon (up to 1.5x total steps).
     for global_step in range(0, 1500, 250):
         expected = self.np_cosine_decay(global_step, steps_total)
         decayed = learning_rate_decay.cosine_decay(base_lr, global_step,
                                                    steps_total)
         self.assertAllClose(self.evaluate(decayed), expected, 1e-6)
Code example #5
0
 def testAlpha(self):
   """Checks cosine_decay with a nonzero alpha floor against NumPy."""
   total_steps = 1000
   lr = 1.0
   floor = 0.1
   for global_step in range(0, 1500, 250):
     # TF1-style graph test: build and eval inside a session context.
     with self.test_session():
       decayed = learning_rate_decay.cosine_decay(
           lr, global_step, total_steps, floor)
       reference = self.np_cosine_decay(global_step, total_steps, floor)
       self.assertAllClose(decayed.eval(), reference, 1e-6)