def test_hessian_diagonal(self):
    """Regression check: HessianDiagonal on the diabetes dataset reaches RMSLE ~0.5032."""
    dataset = datasets.load_diabetes()
    data, target = dataset.data, dataset.target

    # Standardize both inputs and targets before training.
    input_scaler = preprocessing.StandardScaler()
    target_scaler = preprocessing.StandardScaler()
    scaled_inputs = input_scaler.fit_transform(data)
    scaled_targets = target_scaler.fit_transform(target.reshape(-1, 1))

    x_train, x_test, y_train, y_test = cross_validation.train_test_split(
        scaled_inputs, scaled_targets, train_size=0.8)

    network = algorithms.HessianDiagonal(
        connection=[
            layers.SigmoidLayer(10),
            layers.SigmoidLayer(20),
            layers.OutputLayer(1),
        ],
        step=1.5,
        shuffle_data=False,
        verbose=False,
        min_eigenvalue=1e-10,
    )
    network.train(x_train, y_train, epochs=10)

    predicted = network.predict(x_test)
    # Compare in the original target scale; predictions are rounded first.
    error = rmsle(
        target_scaler.inverse_transform(y_test),
        target_scaler.inverse_transform(predicted).round(),
    )
    self.assertAlmostEqual(0.5032, error, places=4)
def test_hessdiag(self):
    """HessianDiagonal should converge to ~0.10 training error on simple classification."""
    x_train, x_test, y_train, y_test = simple_classification()

    # Both hidden layers share the same bounded-initialization settings.
    bounded_init = dict(init_method='bounded', bounds=(-1, 1))
    network = algorithms.HessianDiagonal(
        connection=[
            layers.Sigmoid(10, **bounded_init),
            layers.Sigmoid(20, **bounded_init),
            layers.Output(1),
        ],
        step=0.1,
        shuffle_data=False,
        verbose=False,
        min_eigval=0.01,
    )
    # Inputs are halved to keep sigmoid activations away from saturation.
    network.train(x_train / 2, y_train, epochs=10)
    network.predict(x_test)
    self.assertAlmostEqual(0.10, network.errors.last(), places=2)
def test_hessdiag(self):
    """HessianDiagonal training error should fall below 0.2 within 50 epochs."""
    x_train, x_test, y_train, y_test = simple_classification()

    # Shared small-uniform initialization for weights and biases.
    init_kwargs = {
        'weight': init.Uniform(-0.1, 0.1),
        'bias': init.Uniform(-0.1, 0.1),
    }
    optimizer = algorithms.HessianDiagonal(
        network=[
            layers.Input(10),
            layers.Sigmoid(20, **init_kwargs),
            layers.Sigmoid(1, **init_kwargs),
        ],
        step=0.1,
        shuffle_data=False,
        verbose=False,
        min_eigval=0.1,
    )
    optimizer.train(x_train, y_train, epochs=50)
    self.assertGreater(0.2, optimizer.errors.train[-1])
def test_hessdiag(self):
    """HessianDiagonal should converge to ~0.10 training error on simple classification."""
    x_train, x_test, y_train, y_test = simple_classification()

    def uniform_sigmoid(size):
        # Sigmoid layer with weights and biases drawn uniformly from [-1, 1].
        return layers.Sigmoid(
            size,
            weight=init.Uniform(-1, 1),
            bias=init.Uniform(-1, 1),
        )

    optimizer = algorithms.HessianDiagonal(
        connection=[
            layers.Input(10),
            uniform_sigmoid(20),
            uniform_sigmoid(1),
        ],
        step=0.1,
        shuffle_data=False,
        verbose=False,
        min_eigval=0.01,
    )
    # Inputs are halved to keep sigmoid activations away from saturation.
    optimizer.train(x_train / 2, y_train, epochs=10)
    self.assertAlmostEqual(0.10, optimizer.errors.last(), places=2)
def select_algorithm(self, algorithm, options=None):
    """Instantiate the training algorithm named by *algorithm* on self.layers.

    Parameters
    ----------
    algorithm : str
        Name of the NeuPy training algorithm (e.g. 'Adam', 'RPROP',
        'LevenbergMarquardt'). An unrecognized name leaves the default
        LevenbergMarquardt network in place (matching the original behavior).
    options : sequence or None
        Only used for 'LevenbergMarquardt': options[0] is ``mu`` and
        options[1] is ``mu_update_factor``.

    Returns
    -------
    None
        Always returns None; the result is stored on ``self.network``.
    """
    # Every algorithm except LevenbergMarquardt is constructed the same way,
    # so dispatch through a table instead of a long if-chain.
    simple_algorithms = {
        'GradientDescent': algorithms.GradientDescent,
        'Adam': algorithms.Adam,
        'QuasiNewton': algorithms.QuasiNewton,
        'Quickprop': algorithms.Quickprop,
        'MinibatchGradientDescent': algorithms.MinibatchGradientDescent,
        'ConjugateGradient': algorithms.ConjugateGradient,
        'Hessian': algorithms.Hessian,
        'HessianDiagonal': algorithms.HessianDiagonal,
        'Momentum': algorithms.Momentum,
        'RPROP': algorithms.RPROP,
        'IRPROPPlus': algorithms.IRPROPPlus,
        'Adadelta': algorithms.Adadelta,
        'Adagrad': algorithms.Adagrad,
        'RMSProp': algorithms.RMSProp,
        'Adamax': algorithms.Adamax,
    }

    try:
        # Default network; kept if *algorithm* is not recognized below.
        self.network = algorithms.LevenbergMarquardt(self.layers)
        print("Wybrano optymalizator: " + str(algorithm))
    except RecursionError:
        print("Problem rekursji")
        return None

    if algorithm == 'LevenbergMarquardt':
        # BUG FIX: the original unconditionally did `print(opt[1])`, which
        # raised an uncaught TypeError whenever options was None (the default).
        # Options are now only touched when they are actually needed.
        if options is not None:
            self.network = algorithms.LevenbergMarquardt(
                connection=self.layers,
                mu=options[0],
                mu_update_factor=options[1],
            )
    elif algorithm in simple_algorithms:
        self.network = simple_algorithms[algorithm](self.layers)