Example #1
    def test_perceptron_init_errors(self):
        with self.assertRaises(ValueError):
            algorithms.Perceptron((2, 2, 1), verbose=False)

        with self.assertRaises(ValueError):
            algorithms.Perceptron((2, 2.5), verbose=False)

        with self.assertRaises(NetworkConnectionError):
            algorithms.Perceptron(layers.Sigmoid(2) > layers.Output(1),
                                  verbose=False)
Example #2
    def test_gd(self):
        x_train, _, y_train, _ = simple_classification()

        network = algorithms.GradientDescent(
            (layers.Tanh(10) > layers.Tanh(20) > layers.Output(1)),
            step=0.3,
            verbose=False
        )
        network.train(x_train, y_train, epochs=500)
        self.assertAlmostEqual(network.errors.last(), 0.02, places=3)
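simple_classification is a test fixture that is not shown in these snippets. A minimal sketch of what such a helper could return, assuming scikit-learn's make_classification (names and sizes here are illustrative, not neupy's actual fixture):

from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split


def simple_classification(n_samples=100, n_features=10, random_state=33):
    # Build a small binary problem and return the same
    # (x_train, x_test, y_train, y_test) tuple the tests unpack.
    data, target = make_classification(n_samples=n_samples,
                                       n_features=n_features,
                                       random_state=random_state)
    return train_test_split(data, target.reshape((-1, 1)),
                            train_size=0.6, random_state=random_state)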
Example #3
    def test_predict_different_inputs(self):
        for bp_algorithm_class in self.bp_algorithms:
            network = bp_algorithm_class(
                [
                    layers.Linear(
                        size=2, bias=np.zeros(1), weight=np.zeros((2, 1))),
                    layers.Output(1),
                ],
                verbose=False,
            )
            self.assertInvalidVectorPred(network,
                                         input_vector=np.array([0, 0]),
                                         target=0,
                                         is_feature1d=False)
Example #4
    def test_mixture_of_experts(self):
        dataset = datasets.load_diabetes()
        data, target = asfloat(dataset.data), asfloat(dataset.target)
        insize, outsize = data.shape[1], 1

        input_scaler = preprocessing.MinMaxScaler((-1, 1))
        output_scaler = preprocessing.MinMaxScaler()
        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            output_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8)

        n_epochs = 10
        scaled_y_test = output_scaler.inverse_transform(y_test)
        scaled_y_test = scaled_y_test.reshape((y_test.size, 1))

        # -------------- Train single GradientDescent -------------- #

        bpnet = algorithms.GradientDescent((insize, 20, outsize),
                                           step=0.1,
                                           verbose=False)
        bpnet.train(x_train, y_train, epochs=n_epochs)
        network_output = bpnet.predict(x_test)
        network_error = rmsle(output_scaler.inverse_transform(network_output),
                              scaled_y_test)

        # -------------- Train ensemble -------------- #

        moe = algorithms.MixtureOfExperts(
            networks=[
                algorithms.Momentum((insize, 20, outsize),
                                    step=0.1,
                                    batch_size=1,
                                    verbose=False),
                algorithms.Momentum((insize, 20, outsize),
                                    step=0.1,
                                    batch_size=1,
                                    verbose=False),
            ],
            gating_network=algorithms.Momentum(
                layers.Softmax(insize) > layers.Output(2),
                step=0.1,
                verbose=False))
        moe.train(x_train, y_train, epochs=n_epochs)
        ensemble_output = moe.predict(x_test)
        ensemble_error = rmsle(
            output_scaler.inverse_transform(ensemble_output), scaled_y_test)

        self.assertGreater(network_error, ensemble_error)
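rmsle above comes from neupy's estimators module. For reference, the standard root mean squared logarithmic error it computes can be sketched in NumPy as follows (a textbook version, not neupy's exact code; note the formula is symmetric in its two arguments):

import numpy as np


def rmsle_reference(expected, predicted):
    # sqrt(mean((log(1 + predicted) - log(1 + expected)) ** 2))
    log_diff = np.log1p(predicted) - np.log1p(expected)
    return np.sqrt(np.mean(log_diff ** 2))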
Example #5
    def test_minibatch_gd(self):
        x_train, _, y_train, _ = simple_classification()
        compare_networks(
            # Test classes
            algorithms.GradientDescent,
            partial(algorithms.MinibatchGradientDescent, batch_size=1),
            # Test data
            (x_train, y_train),
            # Network configurations
            connection=(layers.Tanh(10) > layers.Tanh(20) > layers.Output(1)),
            step=0.1,
            shuffle_data=True,
            verbose=False,
            # Test configurations
            epochs=40,
            show_comparison_plot=False
        )
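MinibatchGradientDescent with batch_size=1 updates the weights after every sample instead of once per epoch. Conceptually the data is consumed in slices, as in this generic helper (illustrative only, not neupy's internals):

def iter_minibatches(x, y, batch_size):
    # Yield consecutive (x, y) slices of at most batch_size rows;
    # batch_size=1 degenerates to one update per training sample.
    for start in range(0, len(x), batch_size):
        yield x[start:start + batch_size], y[start:start + batch_size]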
Example #6
    def test_hessdiag(self):
        x_train, x_test, y_train, y_test = simple_classification()
        nw = algorithms.HessianDiagonal(
            connection=[
                layers.Sigmoid(10, init_method='bounded', bounds=(-1, 1)),
                layers.Sigmoid(20, init_method='bounded', bounds=(-1, 1)),
                layers.Output(1)
            ],
            step=0.1,
            shuffle_data=False,
            verbose=False,
            min_eigval=0.01,
        )
        nw.train(x_train / 2, y_train, epochs=10)
        y_predict = nw.predict(x_test)

        self.assertAlmostEqual(0.10, nw.errors.last(), places=2)
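HessianDiagonal scales each gradient component by a diagonal approximation of the Hessian, in the spirit of the classic Becker-LeCun rule; min_eigval apparently keeps near-zero curvature values from destabilizing the division. A sketch of that update (illustrative, not neupy's implementation):

import numpy as np


def hessian_diagonal_step(weight, gradient, hessian_diag,
                          step=0.1, min_eigval=0.01):
    # Replace tiny curvature values by +/- min_eigval so the
    # element-wise Newton step stays numerically stable.
    sign = np.where(hessian_diag >= 0, 1.0, -1.0)
    safe_diag = np.where(np.abs(hessian_diag) < min_eigval,
                         min_eigval * sign, hessian_diag)
    return weight - step * gradient / safe_diag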
Example #7
    def test_bfgs(self):
        x_train, x_test, y_train, y_test = simple_classification()

        qnnet = algorithms.QuasiNewton(
            connection=[
                layers.Sigmoid(10, init_method='ortho'),
                layers.Sigmoid(25, init_method='ortho'),
                layers.Output(1)
            ],
            shuffle_data=True,
            show_epoch='20 times',
            verbose=False,
        )
        qnnet.train(x_train, y_train, x_test, y_test, epochs=20)
        result = qnnet.predict(x_test).round().astype(int)

        roc_curve_score = metrics.roc_auc_score(result, y_test)
        self.assertAlmostEqual(0.92, roc_curve_score, places=2)
Example #8
    def test_compare_bp_and_hessian(self):
        x_train, _, y_train, _ = simple_classification()
        compare_networks(
            # Test classes
            algorithms.GradientDescent,
            partial(algorithms.HessianDiagonal, min_eigval=0.01),
            # Test data
            (x_train, y_train),
            # Network configurations
            connection=[
                layers.Sigmoid(10, init_method='bounded', bounds=(-1, 1)),
                layers.Sigmoid(20, init_method='bounded', bounds=(-1, 1)),
                layers.Output(1)
            ],
            step=0.1,
            shuffle_data=True,
            verbose=False,
            # Test configurations
            epochs=50,
            show_comparison_plot=False
        )
Example #9
    def test_linear_search(self):
        methods = [
            ('golden', 0.34202),
            ('brent', 0.34942),
        ]

        for method_name, valid_error in methods:
            np.random.seed(self.random_seed)

            dataset = datasets.load_boston()
            data, target = dataset.data, dataset.target

            data_scaler = preprocessing.MinMaxScaler()
            target_scaler = preprocessing.MinMaxScaler()

            x_train, x_test, y_train, y_test = train_test_split(
                data_scaler.fit_transform(data),
                target_scaler.fit_transform(target.reshape(-1, 1)),
                train_size=0.85)

            cgnet = algorithms.ConjugateGradient(
                connection=[
                    layers.Sigmoid(13),
                    layers.Sigmoid(50),
                    layers.Output(1),
                ],
                show_epoch=1,
                verbose=False,
                search_method=method_name,
                tol=0.1,
                addons=[algorithms.LinearSearch],
            )
            cgnet.train(x_train, y_train, epochs=4)
            y_predict = cgnet.predict(x_test).round(1)

            error = rmsle(target_scaler.inverse_transform(y_test),
                          target_scaler.inverse_transform(y_predict))

            self.assertAlmostEqual(valid_error, error, places=5)
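'golden' and 'brent' name classic scalar minimization methods that the LinearSearch add-on uses to pick a step length along the search direction. For illustration, a self-contained golden-section search (a generic textbook version, unrelated to neupy's internals):

import math


def golden_section_search(f, a, b, tol=1e-5):
    # Shrink the bracket [a, b] around the minimum, keeping the
    # probe points in the golden ratio, until it is narrow enough.
    inv_phi = (math.sqrt(5) - 1) / 2
    c = b - inv_phi * (b - a)
    d = a + inv_phi * (b - a)
    while abs(b - a) > tol:
        if f(c) < f(d):
            b, d = d, c
            c = b - inv_phi * (b - a)
        else:
            a, c = c, d
            d = a + inv_phi * (b - a)
    return (a + b) / 2


# For example, golden_section_search(lambda x: (x - 1) ** 2, 0, 3)
# converges to roughly 1.0.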
Example #10
    def test_quasi_newton_sr1(self):
        x_train, x_test, y_train, y_test = simple_classification()

        qnnet = algorithms.QuasiNewton(
            connection=[
                layers.Sigmoid(10, init_method='ortho'),
                layers.Sigmoid(30, init_method='ortho'),
                layers.Output(1)
            ],
            shuffle_data=True,
            show_epoch=20,
            verbose=False,

            update_function='sr1',
            h0_scale=2,
            gradient_tol=1e-5,
        )
        qnnet.train(x_train, y_train, x_test, y_test, epochs=10)
        result = qnnet.predict(x_test).round()

        roc_curve_score = metrics.roc_auc_score(result, y_test)
        self.assertAlmostEqual(0.92, roc_curve_score, places=2)
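update_function='sr1' selects the symmetric rank-one quasi-Newton update. In its standard inverse-Hessian form the correction looks like this (a textbook sketch; h is the current inverse-Hessian estimate, s the parameter step, y the gradient difference):

import numpy as np


def sr1_update(h, s, y, epsilon=1e-8):
    # H+ = H + (s - H y)(s - H y)^T / ((s - H y)^T y),
    # skipping the update when the denominator is numerically unsafe.
    residual = s - h.dot(y)
    denominator = residual.dot(y)
    if abs(denominator) < epsilon:
        return h
    return h + np.outer(residual, residual) / denominator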
Example #11
data = dataset.data
target = dataset.target.reshape((-1, 1))

data_scaler = preprocessing.MinMaxScaler((-3, 3))
target_scaler = preprocessing.MinMaxScaler()

data = data_scaler.fit_transform(data)
target = target_scaler.fit_transform(target)

x_train, x_test, y_train, y_test = train_test_split(data,
                                                    target,
                                                    train_size=0.85)

cgnet = algorithms.Hessian(
    connection=[
        layers.Sigmoid(13),
        layers.Sigmoid(50),
        layers.Sigmoid(10),
        layers.Output(1),
    ],
    verbose=True,
)

cgnet.train(x_train, y_train, x_test, y_test, epochs=3)
y_predict = cgnet.predict(x_test)

y_test = target_scaler.inverse_transform(y_test.reshape((-1, 1)))
y_predict = target_scaler.inverse_transform(y_predict).T.round(1)
error = estimators.rmsle(y_predict, y_test)
print("RMSLE = {}".format(error))
Example #12
    def test_handle_errors(self):
        networks = [
            algorithms.GradientDescent((1, 20, 1), step=0.2, verbose=False),
            algorithms.GradientDescent((1, 20, 1), step=0.2, verbose=False),
        ]

        with self.assertRaises(ValueError):
            # Invalid network (not GradientDescent)
            algorithms.MixtureOfExperts(
                networks=networks + [algorithms.GRNN(verbose=False)],
                gating_network=algorithms.GradientDescent(
                    layers.Sigmoid(1) > layers.Output(3),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid number of outputs in the third network
            algorithms.MixtureOfExperts(
                networks=networks + [
                    algorithms.GradientDescent(
                        (1, 20, 2), step=0.2, verbose=False)
                ],
                gating_network=algorithms.GradientDescent(
                    layers.Sigmoid(1) > layers.Output(3),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid gating network output layer size
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Softmax(1) > layers.Output(1),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid gating network input layer
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Sigmoid(1) > layers.Output(2),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid gating network output layer
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Softmax(1) > layers.RoundedOutput(2),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid network error function
            algorithms.MixtureOfExperts(
                networks=networks + [
                    algorithms.GradientDescent(
                        (1, 20, 1),
                        step=0.2,
                        error='rmsle',
                        verbose=False,
                    )
                ],
                gating_network=algorithms.GradientDescent(
                    layers.Sigmoid(1) > layers.Output(3),
                    verbose=False,
                ),
            )

        with self.assertRaises(ValueError):
            moe = algorithms.MixtureOfExperts(
                # Invalid gating error function
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Softmax(1) > layers.Output(2),
                    error='rmsle',
                    verbose=False),
            )

        moe = algorithms.MixtureOfExperts(
            # Valid configuration, reused for the train-time checks below
            networks=networks,
            gating_network=algorithms.GradientDescent(
                layers.Softmax(1) > layers.Output(2), verbose=False),
        )
        with self.assertRaises(ValueError):
            # Wrong number of train input features
            moe.train(np.array([[1, 2]]), np.array([[0]]))

        with self.assertRaises(ValueError):
            # Wrong number of train output features
            moe.train(np.array([[1]]), np.array([[0, 0]]))
Example #13
def get_connection():
    """ Generate new connections every time when we call it.
    """
    input_layer = NoBiasSigmoid(2, weight=default_weight.copy())
    output_layer = layers.Output(1)
    return input_layer > output_layer
Example #14
mnist = datasets.fetch_mldata('MNIST original')

data = mnist.data / 255.
features_mean = data.mean(axis=0)
data = (data - features_mean).astype(np.float32)

np.random.shuffle(data)
x_train, x_test = data[:60000], data[60000:]

autoencoder = algorithms.Momentum(
    [
        layers.Dropout(proba=0.5),
        layers.Sigmoid(784),
        layers.Sigmoid(100),
        layers.Output(784),
    ],
    step=0.25,
    verbose=True,
    momentum=0.99,
    nesterov=True,
)
autoencoder.train(x_train, x_train, x_test, x_test, epochs=100)

n_samples = 4
image_vectors = x_test[:n_samples, :]
images = (image_vectors + features_mean) * 255.
predicted_images = autoencoder.predict(image_vectors)
predicted_images = (predicted_images + features_mean) * 255.

# Compare real and reconstructed images
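The snippet ends before the actual comparison. A minimal matplotlib sketch that would show the images / predicted_images pairs side by side (assuming the usual 28x28 MNIST digit shape):

import matplotlib.pyplot as plt

fig, axes = plt.subplots(n_samples, 2, figsize=(4, 2 * n_samples))
for row, (real, predicted) in enumerate(zip(images, predicted_images)):
    # Left column: original digit, right column: reconstruction.
    axes[row][0].imshow(real.reshape((28, 28)), cmap=plt.cm.gray)
    axes[row][1].imshow(predicted.reshape((28, 28)), cmap=plt.cm.gray)
plt.show()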
Example #15
    def test_valid_cases(self):
        algorithms.Perceptron(layers.Step(2) > layers.Output(1), verbose=False)