Example #1
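Trains Adagrad against a prebuilt self.network on a simple classification task and asserts that the final validation error falls below 0.15.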
 def test_adagrad(self):
     x_train, x_test, y_train, y_test = simple_classification()
     optimizer = algorithms.Adagrad(
         self.network,
         step=0.1,
         batch_size=None,
         verbose=False,
     )
     optimizer.train(x_train, y_train, x_test, y_test, epochs=150)
     self.assertGreater(0.15, optimizer.errors.valid[-1])
Example #2
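Builds a (10, 20, 1) network, trains it with a comparatively large step of 2.0 on the full batch, and asserts that the final training error is approximately 0.068.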
 def test_simple_adagrad(self):
     x_train, _, y_train, _ = simple_classification()
     mnet = algorithms.Adagrad(
         (10, 20, 1),
         step=2.,
         batch_size='full',
         verbose=False,
         epsilon=1e-5,
     )
     mnet.train(x_train, y_train, epochs=100)
     self.assertAlmostEqual(0.068, mnet.errors.last(), places=3)
Example #3
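Same network shape as Example #2, but with step=0.1 and a held-out validation set; asserts that the last validation error falls below 0.15.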
 def test_simple_adagrad(self):
     x_train, x_test, y_train, y_test = simple_classification()
     mnet = algorithms.Adagrad(
         (10, 20, 1),
         step=0.1,
         batch_size='full',
         verbose=False,
         epsilon=1e-5,
     )
     mnet.train(x_train, y_train, x_test, y_test, epochs=100)
     self.assertGreater(0.15, mnet.validation_errors.last())
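The three tests above depend on test fixtures (simple_classification() and, in Example #1, a prebuilt self.network). A minimal self-contained sketch is shown below; the random toy data is an assumption standing in for that fixture, while the Adagrad parameters mirror Example #3.

 import numpy as np
 from neupy import algorithms

 # Hypothetical stand-in for simple_classification():
 # 60 samples, 10 input features, binary target.
 x_train = np.random.random((60, 10))
 y_train = (np.random.random((60, 1)) > 0.5).astype(int)

 optimizer = algorithms.Adagrad(
     (10, 20, 1),      # 10 inputs -> 20 hidden units -> 1 output
     step=0.1,
     batch_size='full',
     verbose=False,
     epsilon=1e-5,
 )
 optimizer.train(x_train, y_train, epochs=100)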
Example #4
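A dispatcher that picks a NeuPy training algorithm by name and stores the resulting optimizer on self.network, defaulting to Levenberg-Marquardt.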
	def select_algorithm(self, algorithm, options=None):
		try:
			# Default optimizer; replaced below when a known name matches.
			self.network = algorithms.LevenbergMarquardt(self.layers)
			print("Selected optimizer: " + str(algorithm))
		except RecursionError:
			print("Recursion problem")
			return None

		if algorithm == 'GradientDescent':
			self.network = algorithms.GradientDescent(self.layers)
		elif algorithm == 'LevenbergMarquardt' and options is not None:
			# Only Levenberg-Marquardt consumes `options`; guard against None
			# (the original indexed options unconditionally and crashed).
			self.network = algorithms.LevenbergMarquardt(
				connection=self.layers, mu=options[0], mu_update_factor=options[1])
		elif algorithm == 'Adam':
			self.network = algorithms.Adam(self.layers)
		elif algorithm == 'QuasiNewton':
			self.network = algorithms.QuasiNewton(self.layers)
		elif algorithm == 'Quickprop':
			self.network = algorithms.Quickprop(self.layers)
		elif algorithm == 'MinibatchGradientDescent':
			self.network = algorithms.MinibatchGradientDescent(self.layers)
		elif algorithm == 'ConjugateGradient':
			self.network = algorithms.ConjugateGradient(self.layers)
		elif algorithm == 'Hessian':
			self.network = algorithms.Hessian(self.layers)
		elif algorithm == 'HessianDiagonal':
			self.network = algorithms.HessianDiagonal(self.layers)
		elif algorithm == 'Momentum':
			self.network = algorithms.Momentum(self.layers)
		elif algorithm == 'RPROP':
			self.network = algorithms.RPROP(self.layers)
		elif algorithm == 'IRPROPPlus':
			self.network = algorithms.IRPROPPlus(self.layers)
		elif algorithm == 'Adadelta':
			self.network = algorithms.Adadelta(self.layers)
		elif algorithm == 'Adagrad':
			self.network = algorithms.Adagrad(self.layers)
		elif algorithm == 'RMSProp':
			self.network = algorithms.RMSProp(self.layers)
		elif algorithm == 'Adamax':
			self.network = algorithms.Adamax(self.layers)
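Example #4's if/elif ladder can also be collapsed into table-driven dispatch. A minimal sketch of that alternative follows; the OPTIMIZERS table is hypothetical (not the original author's code), and it assumes every listed optimizer accepts the layer connection as its first argument, as the examples above do.

 from neupy import algorithms

 # Hypothetical lookup table mapping the same names Example #4 checks for
 # to their NeuPy optimizer classes.
 OPTIMIZERS = {
     'GradientDescent': algorithms.GradientDescent,
     'Adam': algorithms.Adam,
     'QuasiNewton': algorithms.QuasiNewton,
     'Quickprop': algorithms.Quickprop,
     'MinibatchGradientDescent': algorithms.MinibatchGradientDescent,
     'ConjugateGradient': algorithms.ConjugateGradient,
     'Hessian': algorithms.Hessian,
     'HessianDiagonal': algorithms.HessianDiagonal,
     'Momentum': algorithms.Momentum,
     'RPROP': algorithms.RPROP,
     'IRPROPPlus': algorithms.IRPROPPlus,
     'Adadelta': algorithms.Adadelta,
     'Adagrad': algorithms.Adagrad,
     'RMSProp': algorithms.RMSProp,
     'Adamax': algorithms.Adamax,
 }

 def select_algorithm(self, algorithm, options=None):
     if algorithm == 'LevenbergMarquardt' and options is not None:
         # Levenberg-Marquardt is the only case that consumes `options`.
         self.network = algorithms.LevenbergMarquardt(
             connection=self.layers, mu=options[0], mu_update_factor=options[1])
     else:
         # Unknown names fall back to Levenberg-Marquardt, as in the original.
         cls = OPTIMIZERS.get(algorithm, algorithms.LevenbergMarquardt)
         self.network = cls(self.layers)
     print("Selected optimizer: " + str(algorithm))

The table keeps name-to-class bookkeeping in data rather than control flow, so adding an optimizer is a one-line change.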