def test_leak_step_adaptation(self):
        compare_networks(
            # Test classes
            algorithms.GradientDescent,
            partial(
                algorithms.GradientDescent,
                leak_size=0.05,
                alpha=0.05,
                beta=5,
                addons=[algorithms.LeakStepAdaptation]
            ),

            # Test data
            (even_input_train, even_target_train),

            # Network configurations
            connection=[
                layers.Sigmoid(2),
                layers.Tanh(3),
                layers.Output(1)
            ],
            step=0.1,
            verbose=False,
            shuffle_data=True,
            epochs=30,
            # show_comparison_plot=True,
        )
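These snippets are lifted from neupy's test suite, so each one omits its imports and the surrounding test-case class. A minimal sketch of the setup they appear to rely on is shown below; compare_networks and the small datasets (simple_classification, simple_input_train, even_input_train, and so on) are helpers from the repository's tests rather than the public API, so the exact module paths are assumptions.

from functools import partial

from neupy import algorithms, layers

# Test-suite helpers (module paths are an assumption): compare_networks
# trains both configurations on the same data and reports their errors;
# simple_classification builds a small random train/test classification set.
from utils import compare_networks
from data import simple_classification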
Example #2
 def test_compare_bp_and_rprop(self):
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         algorithms.RPROP,
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         show_comparison_plot=False)
Example #3
 def test_compare_bp_and_hessian(self):
     x_train, x_test, y_train, y_test = simple_classification()
     compare_networks(
         # Test classes
         partial(algorithms.GradientDescent, batch_size='all'),
         partial(algorithms.Hessian, penalty_const=1),
         # Test data
         (x_train, y_train, x_test, y_test),
         # Network configurations
         connection=(10, 15, 1),
         shuffle_data=True,
         verbose=False,
         show_epoch=1,
         # Test configurations
         epochs=5,
         show_comparison_plot=False)
Example #4
 def test_adaptive_learning_rate(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         Backpropagation,
         partial(
             Backpropagation,
             # Adaptive learning rate settings
             leak_size=0.5,
             alpha=0.5,
             beta=0.5,
             optimizations=[LeakStepAdaptation]
         ),
         # Test data
         (even_input_train, even_target_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         use_raw_predict_at_error=True,
         shuffle_data=True,
         # Adaptive learning rate parameters
         # Test configurations
         epochs=30,
         # is_comparison_plot=True,
     )
     self.assertGreater(network_default_error, network_tested_error)
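As the unpacked return values show, compare_networks here yields a pair of errors, the first for the default configuration and the second for the tested one, so the assertion checks that the leaky step adaptation ends up with a lower error than plain Backpropagation on the same data. The later examples that unpack two values follow the same pattern.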
Example #5
 def test_compare_quickprop_and_bp(self):
     x_train, _, y_train, _ = self.data
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         partial(algorithms.Quickprop, upper_bound=0.5),
         # Test data
         (x_train, y_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         shuffle_data=True,
         # Test configurations
         epochs=100,
         verbose=False,
         show_comparison_plot=False)
Example #6
 def test_compare_bp_and_rprop(self):
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         algorithms.RPROP,
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         show_comparison_plot=False
     )
Example #7
 def test_compare_bp_and_hessian(self):
     x_train, x_test, y_train, y_test = simple_classification()
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         partial(algorithms.Hessian, penalty_const=1),
         # Test data
         (x_train, y_train, x_test, y_test),
         # Network configurations
         connection=(10, 15, 1),
         shuffle_data=True,
         verbose=False,
         show_epoch=1,
         # Test configurations
         epochs=5,
         show_comparison_plot=False
     )
Example #8
File: test_gd.py Project: itdxer/neupy
 def test_minibatch_gd(self):
     x_train, _, y_train, _ = simple_classification()
     compare_networks(
        # Test classes
        algorithms.GradientDescent,
        partial(algorithms.MinibatchGradientDescent, batch_size=1),
        # Test data
        (x_train, y_train),
        # Network configurations
        connection=(layers.Input(10) > layers.Tanh(20) > layers.Tanh(1)),
        step=0.1,
        shuffle_data=True,
        verbose=False,
        # Test configurations
        epochs=40,
        show_comparison_plot=False
     )
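The connection argument is written in several forms across these examples: a plain tuple of layer sizes, a list of layer instances, a chain built with the > operator (as above), or layers.join. A short sketch of the variants for a 10-20-1 network, assuming the neupy layers module; the tuple form leaves the layer types to the library's defaults:

connection_as_sizes = (10, 20, 1)
connection_as_chain = layers.Input(10) > layers.Tanh(20) > layers.Tanh(1)
connection_as_join = layers.join(
    layers.Input(10),
    layers.Tanh(20),
    layers.Tanh(1),
)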
Example #9
 def test_compare_quickprop_and_bp(self):
     x_train, _, y_train, _ = self.data
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         partial(algorithms.Quickprop, upper_bound=0.5),
         # Test data
         (x_train, y_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         shuffle_data=True,
         # Test configurations
         epochs=100,
         verbose=False,
         show_comparison_plot=False
     )
Example #10
 def test_with_minibatch(self):
     x_train, _, y_train, _ = simple_classification()
     compare_networks(
        # Test classes
        partial(algorithms.Momentum, batch_size='full'),
        partial(algorithms.Momentum, batch_size=1),
        # Test data
        (x_train, y_train),
        # Network configurations
        connection=(10, 20, 1),
        step=0.25,
        momentum=0.1,
        shuffle_data=True,
        verbose=False,
        # Test configurations
        epochs=40,
        show_comparison_plot=False,
     )
Example #11
 def test_compare_bp_and_cg(self):
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         partial(
             algorithms.ConjugateGradient,
             update_function='fletcher_reeves'
         ),
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         error='categorical_crossentropy',
         shuffle_data=True,
         # Test configurations
         epochs=50,
         show_comparison_plot=False
     )
Example #12
 def test_compare_bp_and_hessian(self):
     x_train, _, y_train, _ = simple_classification()
     compare_networks(
         # Test classes
         algorithms.GradientDescent,
         partial(algorithms.HessianDiagonal, min_eigval=0.01),
         # Test data
         (x_train, y_train),
         # Network configurations
         connection=[
             layers.Sigmoid(10, init_method='bounded', bounds=(-1, 1)),
             layers.Sigmoid(20, init_method='bounded', bounds=(-1, 1)),
             layers.Output(1)
         ],
         step=0.1,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         show_comparison_plot=False
     )
Example #13
    def test_compare_bp_and_cg(self):
        x_train, x_test, y_train, y_test = simple_classification()

        compare_networks(
            # Test classes
            partial(
                algorithms.GradientDescent,
                batch_size='all',
                step=1.0,
            ),
            partial(algorithms.ConjugateGradient,
                    update_function='fletcher_reeves'),
            # Test data
            (asfloat(x_train), asfloat(y_train)),
            # Network configurations
            connection=layers.join(
                layers.Input(10),
                layers.Sigmoid(5),
                layers.Sigmoid(1),
            ),
            error='mse',
            shuffle_data=True,
            # Test configurations
            epochs=50,
            show_comparison_plot=False)
Example #14
File: test_rprop.py Project: Neocher/neupy
 def test_compare_bp_and_rprop(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         algorithms.RPROP,
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
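Note that the snippets come from different neupy releases: some use the newer algorithms.GradientDescent spelling with addons=[...], while others, like this one, use the older algorithms.Backpropagation with optimizations=[...]. The calls are otherwise analogous, but those keyword names are not interchangeable between versions.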
Example #15
 def test_compare_quickprop_and_bp(self):
     x_train, _, y_train, _ = self.data
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(algorithms.Quickprop, upper_bound=0.5),
         # Test data
         (x_train, y_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         shuffle_data=True,
         # Test configurations
         epochs=100,
         verbose=False,
         is_comparison_plot=False)
     self.assertGreater(network_default_error, network_tested_error)
Example #16
 def test_stochastic_gradient_descent(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(algorithms.MinibatchGradientDescent, batch_size=4),
         # Test data
         (xor_input_train, xor_target_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         use_raw_predict_at_error=True,
         shuffle_data=True,
         # Test configurations
         epochs=40,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #17
File: test_rprop.py Project: zhdbeng/neupy
 def test_compare_bp_and_rprop(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         algorithms.RPROP,
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #18
 def test_compare_quickprop_and_bp(self):
     x_train, _, y_train, _ = self.data
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(algorithms.Quickprop, upper_bound=0.5),
         # Test data
         (x_train, y_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         shuffle_data=True,
         # Test configurations
         epochs=100,
         verbose=False,
         is_comparison_plot=False
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #19
 def test_compare_bp_and_cg(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(algorithms.ConjugateGradient,
                 update_function='fletcher_reeves'),
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         error=cross_entropy_error,
         use_raw_predict_at_error=False,
         shuffle_data=True,
         # Test configurations
         epochs=50,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #20
 def test_compare_bp_and_cg(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(
             algorithms.ConjugateGradient,
             update_function='fletcher_reeves'
         ),
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=self.connection,
         step=1,
         error=cross_entropy_error,
         shuffle_data=True,
         # Test configurations
         epochs=50,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #21
 def test_wolfe_search(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(
             algorithms.Backpropagation,
             optimizations=[algorithms.WolfeSearch],
             maxstep=10,
             c1=1e-5,
             c2=0.95,
         ),
         # Test data
         (simple_input_train, simple_target_train),
         # Network configurations
         connection=(3, 10, 2),
         step=0.2,
         shuffle_data=True,
         verbose=False,
         # Test configurations
         epochs=50,
         # is_comparison_plot=True
     )
     self.assertGreater(network_default_error, network_tested_error)
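The c1 and c2 values passed to WolfeSearch are presumably the constants of the Wolfe line-search conditions (sufficient decrease and curvature), and maxstep caps the step length the search is allowed to try; only the settings shown in the snippet are taken from the original test.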
Example #22
 def test_adaptive_learning_rate(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         Backpropagation,
         partial(
             Backpropagation,
             # Adaptive learning rate settings
             leak_size=0.5,
             alpha=0.5,
             beta=0.5,
             optimizations=[LeakStepAdaptation]),
         # Test data
         (even_input_train, even_target_train),
         # Network configurations
         connection=self.connection,
         step=0.1,
         use_raw_predict_at_error=True,
         shuffle_data=True,
         # Adaptive learning rate parameters
         # Test configurations
         epochs=30,
         # is_comparison_plot=True,
     )
     self.assertGreater(network_default_error, network_tested_error)
Example #23
 def test_adaptive_learning_rate(self):
     network_default_error, network_tested_error = compare_networks(
         # Test classes
         algorithms.Backpropagation,
         partial(
             algorithms.Backpropagation,
             # Adaptive learning rate settings
             leak_size=0.5,
             alpha=0.5,
             beta=0.5,
             optimizations=[algorithms.LeakStepAdaptation],
         ),
         # Test data
         (even_input_train, even_target_train),
         # Network configurations
          connection=[
              layers.SigmoidLayer(2),
              layers.TanhLayer(3),
              layers.OutputLayer(1)
          ],
         step=0.1,
         shuffle_data=True,
         # Adaptive learning rate parameters
         # Test configurations
         epochs=30,
         # is_comparison_plot=True,
     )
     self.assertGreater(network_default_error, network_tested_error)