Example No. 1
def test12_optimizer_create_updates(self, mock_shared, mock_polyak, mock_adagrad, mock_constrain, mock_rmsprop, mock_sgd, mock_adam, mock_adadelta, mock_nesterov):
    try:
        # Stub every optimizer/momentum helper so create_updates only
        # exercises its dispatch logic, not Theano graph construction.
        mock_polyak.return_value = ""
        mock_adagrad.return_value = ""
        mock_constrain.return_value = ""
        mock_rmsprop.return_value = ""
        mock_sgd.return_value = ""
        mock_adam.return_value = ""
        mock_adadelta.return_value = ""
        mock_nesterov.return_value = ""
        mock_shared.return_value = self.learning_val
        self.opt = opt(
                optimizer_init_args=self.optimizer_params,
                verbose=self.verbose)
        # Cycle through every supported optimizer/momentum combination,
        # plus an unrecognized type, expecting none of them to raise.
        self.opt.optimizer_type = "adagrad"
        self.opt.momentum_type = "polyak"
        self.opt.create_updates(verbose=self.verbose)
        self.opt.optimizer_type = "rmsprop"
        self.opt.momentum_type = "nesterov"
        self.opt.create_updates(verbose=self.verbose)
        self.opt.optimizer_type = "sgd"
        self.opt.momentum_type = "polyak"
        self.opt.create_updates(verbose=self.verbose)
        self.opt.optimizer_type = "adam"
        self.opt.momentum_type = "polyak"
        self.opt.create_updates(verbose=self.verbose)
        self.opt.optimizer_type = "adadelta"
        self.opt.momentum_type = "polyak"
        self.opt.create_updates(verbose=self.verbose)
        self.opt.optimizer_type = "else"
        self.opt.create_updates(verbose=self.verbose)
    except Exception:
        self.fail("create_updates raised an unexpected exception")
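
The mock arguments in the signature above imply a stack of unittest.mock.patch decorators that this listing omits. A minimal sketch of what that stack could look like; the target paths below are placeholders, since the real ones depend on which module the opt class resolves these helpers from:

from unittest import mock

# Placeholder patch targets -- substitute the module path that the opt
# class under test actually imports these names from. Stacked patch
# decorators apply bottom-up, so the bottom-most decorator supplies the
# first mock argument (mock_shared) and the top-most the last
# (mock_nesterov).
@mock.patch('optimizer_module.opt._nesterov')
@mock.patch('optimizer_module.opt._adadelta')
@mock.patch('optimizer_module.opt._adam')
@mock.patch('optimizer_module.opt._sgd')
@mock.patch('optimizer_module.opt._rmsprop')
@mock.patch('optimizer_module.opt._constrain')
@mock.patch('optimizer_module.opt._adagrad')
@mock.patch('optimizer_module.opt._polyak')
@mock.patch('theano.shared')
def test12_optimizer_create_updates(self, mock_shared, mock_polyak,
                                    mock_adagrad, mock_constrain,
                                    mock_rmsprop, mock_sgd, mock_adam,
                                    mock_adadelta, mock_nesterov):
    ...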
Example No. 2
def test3_optimizer_calculate_gradients(self, mock_scalar, mock_shared, mock_grad):
    # With theano.grad mocked to a constant, the computed gradients are
    # deterministic and can be compared against the inputs directly.
    mock_grad.return_value = 1
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    self.opt.calculate_gradients(params=self.input_params,
                                 objective=self.input_objective,
                                 verbose=self.verbose)
    self.assertTrue(numpy.allclose(self.opt.gradients, self.input_params))
Example No. 3
def test2_optimizer_init_momentum(self, mock_scalar, mock_shared, mock_ifelse):
    mock_ifelse.return_value = self.sample
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    self.opt._init_momentum()
    self.assertEqual(self.opt.momentum, self.scalar_value)
Example No. 4
def test13_optimizer_no_init(self, mock_scalar, mock_shared):
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    # An empty init dict should leave the optimizer on its defaults,
    # including the fallback id of '-1'.
    optimizer_params = {}
    self.opt = opt(
            optimizer_init_args=optimizer_params,
            verbose=self.verbose)
    self.assertEqual(self.opt.id, '-1')
Example No. 5
def test4_optimizer_calculate_gradients_exception(self, mock_scalar, mock_shared):
    try:
        mock_shared.return_value = self.learning_val
        mock_scalar.return_value = self.scalar_value
        self.opt = opt(
                optimizer_init_args=self.optimizer_params,
                verbose=self.verbose)
        # objective=False is invalid, so this call is expected to raise.
        self.opt.calculate_gradients(params=self.input_params,
                                     objective=False,
                                     verbose=self.verbose)
    except Exception as c:
        # str(c) replaces the Python 2-only c.message attribute.
        self.assertEqual(str(c), self.gradiant_exception_msg)
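
A caveat on the pattern above: if calculate_gradients does not raise at all, the except block never runs and the test silently passes without checking anything. A tighter equivalent using assertRaises, assuming the same fixtures and decorators as the original (a sketch, not part of the source suite):

def test4_with_assertRaises(self, mock_scalar, mock_shared):
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    # The test now fails loudly if no exception is raised at all.
    with self.assertRaises(Exception) as ctx:
        self.opt.calculate_gradients(params=self.input_params,
                                     objective=False,
                                     verbose=self.verbose)
    self.assertEqual(str(ctx.exception), self.gradiant_exception_msg)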
Example No. 6
def test1_optimizer(self, mock_scalar, mock_shared):
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    self.assertEqual(self.opt.id, self.optimizer_id)
    self.assertEqual(self.opt.momentum_start, self.momentum_params[0])
    self.assertEqual(self.opt.momentum_end, self.momentum_params[1])
    self.assertEqual(self.opt.momentum_epoch_end, self.momentum_params[2])
    self.assertEqual(self.opt.momentum_type, self.momentum_type)
    self.assertEqual(self.opt.epoch, self.scalar_value)
    self.assertEqual(self.opt.learning_rate, self.learning_val)
Example No. 7
def test11_optimizer_constrain(self, mock_shared):
    A = theano.shared(numpy.asarray([1, 1, 1], dtype=theano.config.floatX))
    B = theano.shared(numpy.asarray([1, 1, 1], dtype=theano.config.floatX))
    mock_shared.return_value = self.learning_val
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    self.opt.params = [A, B]
    self.opt.updates = self.f(A) + self.f(B)
    # With zero gradients and a zero learning rate, constraining the
    # updates should leave the parameters unchanged.
    self.opt.gradients = [0, 0]
    self.opt.learning_rate = 0
    self.opt._constrain(verbose=self.verbose)
    self.assertTrue(numpy.allclose(self.opt.updates[0], A))
Example No. 8
def test5_optimizer_nesterov(self, mock_scalar, mock_shared, mock_ifelse):
    A = theano.shared(numpy.asarray([1, 1, 1], dtype=theano.config.floatX))
    B = theano.shared(numpy.asarray([1, 1, 1], dtype=theano.config.floatX))
    mock_ifelse.return_value = 0.9
    mock_shared.return_value = self.learning_val
    mock_scalar.return_value = self.scalar_value
    self.opt = opt(
            optimizer_init_args=self.optimizer_params,
            verbose=self.verbose)
    self.opt._init_momentum()
    self.opt.params = [A, B]
    self.opt.updates = self.f(A) + self.f(B)
    # With the momentum machinery mocked out, applying Nesterov momentum
    # should leave the first update numerically equal to its parameter.
    self.opt._nesterov()
    self.assertTrue(numpy.allclose(self.opt.updates[0], A))
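
All eight examples rely on fixtures (self.optimizer_params, self.learning_val, self.scalar_value, self.f, and so on) that a shared setUp would provide. A minimal sketch with placeholder values chosen to satisfy the assertions above; the source suite's actual values may differ:

import unittest

import numpy

class TestOptimizer(unittest.TestCase):
    def setUp(self):
        self.verbose = 3
        self.learning_val = 0.01   # stand-in return value for the mocked shared()
        self.scalar_value = 0.5    # stand-in return value for the mocked scalar()
        self.sample = 0.9          # stand-in return value for the mocked ifelse()
        self.optimizer_id = 'main'
        self.momentum_type = 'polyak'
        self.momentum_params = (0.5, 0.95, 30)   # (start, end, epoch_end)
        self.optimizer_params = {
            "momentum_type": self.momentum_type,
            "momentum_params": self.momentum_params,
            "optimizer_type": "sgd",
            "id": self.optimizer_id,
        }
        self.input_params = numpy.ones(3)
        self.input_objective = numpy.ones(3)
        # Hypothetical message text -- the real string comes from the
        # exception that calculate_gradients raises on a bad objective.
        self.gradiant_exception_msg = "objective is not a valid theano variable"
        # Wraps one shared variable into a single-element update list, so
        # self.f(A) + self.f(B) builds the two-element updates list.
        self.f = lambda x: [x]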