def test_mixture_of_experts(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target
        insize, outsize = data.shape[1], 1

        input_scaler = preprocessing.MinMaxScaler((-1, 1))
        output_scaler = preprocessing.MinMaxScaler()
        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            output_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8
        )

        n_epochs = 300
        scaled_y_test = output_scaler.inverse_transform(y_test).reshape(
            (y_test.size, 1)
        )

        # -------------- Train single Backpropagation -------------- #

        bpnet = algorithms.Backpropagation(
            (insize, 20, outsize),
            step=0.1,
            verbose=False
        )
        bpnet.train(x_train, y_train, epochs=n_epochs)
        network_output = bpnet.predict(x_test)
        network_error = rmsle(output_scaler.inverse_transform(network_output),
                              scaled_y_test)

        # -------------- Train ensemble -------------- #

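        # Two expert Backpropagation networks, combined by a softmax gating
        # network that learns how much weight to give each expert's output.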
        moe = ensemble.MixtureOfExperts(
            networks=[
                algorithms.Backpropagation(
                    (insize, 20, outsize),
                    step=0.1,
                    verbose=False
                ),
                algorithms.Backpropagation(
                    (insize, 20, outsize),
                    step=0.1,
                    verbose=False
                ),
            ],
            gating_network=algorithms.Backpropagation(
                layers.SoftmaxLayer(insize) > layers.OutputLayer(2),
                step=0.1,
                verbose=False
            )
        )
        moe.train(x_train, y_train, epochs=n_epochs)
        ensemble_output = moe.predict(x_test)
        ensemble_error = rmsle(
            output_scaler.inverse_transform(ensemble_output),
            scaled_y_test
        )

        self.assertGreater(network_error, ensemble_error)
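
Every snippet on this page scores predictions with rmsle, which the tests import from the library under test and which is not shown here. A minimal sketch of such a metric, assuming non-negative NumPy arrays of matching shape (the real helper's argument order and edge-case handling may differ):

import numpy as np

def rmsle(expected, predicted):
    # Root mean squared logarithmic error:
    # sqrt(mean((log(1 + predicted) - log(1 + expected)) ** 2))
    expected = np.asarray(expected, dtype=float)
    predicted = np.asarray(predicted, dtype=float)
    return np.sqrt(np.mean((np.log1p(predicted) - np.log1p(expected)) ** 2))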
Example 2
    def test_pandas_for_bp(self):
        dataset = datasets.load_diabetes()

        input_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        n_features = dataset.data.shape[1]
        input_columns = ['column_' + str(i) for i in range(n_features)]

        pandas_data = pd.DataFrame(dataset.data, columns=input_columns)
        pandas_data['target'] = target_scaler.fit_transform(dataset.target)
        pandas_data[input_columns] = input_scaler.fit_transform(
            pandas_data[input_columns])

        x_train, x_test, y_train, y_test = train_test_split(
            pandas_data[input_columns], pandas_data['target'], train_size=0.85)

        bpnet = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100
        )
        bpnet.train(x_train, y_train, epochs=1000)
        y_predict = bpnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4477, error, places=4)
Example 3
    def test_simple_grnn(self):
        dataset = datasets.load_diabetes()
        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data, dataset.target, train_size=0.7
        )

        x_train_before = x_train.copy()
        x_test_before = x_test.copy()
        y_train_before = y_train.copy()

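        # GRNN stores its own copy of the training samples and predicts a
        # kernel-weighted average of their targets; std sets the kernel bandwidth.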
        grnnet = algorithms.GRNN(std=0.1, verbose=False)
        grnnet.train(x_train, y_train)
        result = grnnet.predict(x_test)
        error = rmsle(result, y_test)

        old_result = result.copy()
        self.assertAlmostEqual(error, 0.4245, places=4)

        # Test problem with variable links
        np.testing.assert_array_equal(x_train, x_train_before)
        np.testing.assert_array_equal(x_test, x_test_before)
        np.testing.assert_array_equal(y_train, y_train_before)

        x_train[:, :] = 0
        result = grnnet.predict(x_test)
        total_classes_prob = np.round(result.sum(axis=1), 10)
        np.testing.assert_array_almost_equal(result, old_result)
Example 4
    def test_levenberg_marquardt(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        data_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        x_train, x_test, y_train, y_test = train_test_split(
            data_scaler.fit_transform(data),
            target_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.85
        )

        # Network
        lmnet = algorithms.LevenbergMarquardt(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            mu_increase_factor=2,
            mu=0.1,
            show_epoch=10,
            use_bias=False,
            verbose=False,
        )
        lmnet.train(x_train, y_train, epochs=100)
        y_predict = lmnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.4372, error, places=4)
Example 5
    def test_pipeline(self):
        dataset = datasets.load_diabetes()
        target_scaler = preprocessing.MinMaxScaler()
        target = dataset.target.reshape(-1, 1)

        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data,
            target_scaler.fit_transform(target),
            train_size=0.85
        )

        network = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100,
            verbose=False,
        )
        pipeline = Pipeline([
            ('min_max_scaler', preprocessing.MinMaxScaler()),
            ('backpropagation', network),
        ])
        pipeline.fit(x_train, y_train, backpropagation__epochs=1000)
        y_predict = pipeline.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4481, error, places=4)
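
The backpropagation__epochs=1000 keyword uses scikit-learn's <step>__<parameter> convention, which routes fit parameters to the named pipeline step during Pipeline.fit().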
Example 6
    def test_hessian_diagonal(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        input_scaler = preprocessing.StandardScaler()
        target_scaler = preprocessing.StandardScaler()

        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            target_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8)

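        # Second-order method that uses only the diagonal of the Hessian;
        # min_eigenvalue bounds tiny second derivatives so the update stays finite.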
        nw = algorithms.HessianDiagonal(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(20),
                layers.OutputLayer(1)
            ],
            step=1.5,
            shuffle_data=False,
            verbose=False,
            min_eigenvalue=1e-10
        )
        nw.train(x_train, y_train, epochs=10)
        y_predict = nw.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.5032, error, places=4)
Example 7
    def test_levenberg_marquardt(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        data_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        x_train, x_test, y_train, y_test = train_test_split(
            data_scaler.fit_transform(data),
            target_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.85)

        # Network
        lmnet = algorithms.LevenbergMarquardt(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            mu_increase_factor=2,
            mu=0.1,
            show_epoch=10,
            use_bias=False
        )
        lmnet.train(x_train, y_train, epochs=100)
        y_predict = lmnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.4372, error, places=4)
Example 8
    def test_pipeline(self):
        dataset = datasets.load_diabetes()
        target_scaler = preprocessing.MinMaxScaler()

        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data,
            target_scaler.fit_transform(dataset.target.reshape(-1, 1)),
            train_size=0.85)

        network = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100,
            verbose=False,
        )
        pipeline = Pipeline([
            ('min_max_scaler', preprocessing.MinMaxScaler()),
            ('backpropagation', network),
        ])
        pipeline.fit(x_train, y_train, backpropagation__epochs=1000)
        y_predict = pipeline.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4481, error, places=4)
Example 9
    def test_hessian_diagonal(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        input_scaler = preprocessing.StandardScaler()
        target_scaler = preprocessing.StandardScaler()

        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            target_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8
        )

        nw = algorithms.HessianDiagonal(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(20),
                layers.OutputLayer(1)
            ],
            step=1.5,
            shuffle_data=False,
            verbose=False,
            min_eigenvalue=1e-10
        )
        nw.train(x_train, y_train, epochs=10)
        y_predict = nw.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.5032, error, places=4)
Example 10
    def test_mixture_of_experts(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target
        insize, outsize = data.shape[1], 1

        input_scaler = preprocessing.MinMaxScaler((-1, 1))
        output_scaler = preprocessing.MinMaxScaler()
        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            output_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8)

        n_epochs = 300
        scaled_y_test = output_scaler.inverse_transform(y_test).reshape(
            (y_test.size, 1))

        # -------------- Train single Backpropagation -------------- #

        bpnet = algorithms.Backpropagation((insize, 20, outsize),
                                           step=0.1,
                                           verbose=False)
        bpnet.train(x_train, y_train, epochs=n_epochs)
        network_output = bpnet.predict(x_test)
        network_error = rmsle(output_scaler.inverse_transform(network_output),
                              scaled_y_test)

        # -------------- Train ensemble -------------- #

        moe = ensemble.MixtureOfExperts(
            networks=[
                algorithms.Backpropagation((insize, 20, outsize),
                                           step=0.1,
                                           verbose=False),
                algorithms.Backpropagation((insize, 20, outsize),
                                           step=0.1,
                                           verbose=False),
            ],
            gating_network=algorithms.Backpropagation(
                layers.SoftmaxLayer(insize) > layers.OutputLayer(2),
                step=0.1,
                verbose=False))
        moe.train(x_train, y_train, epochs=n_epochs)
        ensemble_output = moe.predict(x_test)
        ensemble_error = rmsle(
            output_scaler.inverse_transform(ensemble_output), scaled_y_test)

        self.assertGreater(network_error, ensemble_error)
Example 11
    def test_simple_grnn(self):
        dataset = datasets.load_diabetes()
        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data, dataset.target, train_size=0.7,
            random_state=0
        )

        nw = GRNN(standard_deviation=0.1)
        nw.train(x_train, y_train)
        result = nw.predict(x_test)
        error = rmsle(result, y_test)

        self.assertAlmostEqual(error, 0.4245, places=4)
Example 12
    def test_linear_search(self):
        methods = [
            ('golden', 0.20976),
            ('brent', 0.21190),
        ]

        for method_name, valid_error in methods:
            np.random.seed(self.random_seed)

            dataset = datasets.load_boston()
            data, target = dataset.data, dataset.target

            data_scaler = preprocessing.MinMaxScaler()
            target_scaler = preprocessing.MinMaxScaler()

            x_train, x_test, y_train, y_test = train_test_split(
                data_scaler.fit_transform(data),
                target_scaler.fit_transform(target.reshape(-1, 1)),
                train_size=0.85
            )

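            # Conjugate gradient with the LinearSearch optimization: the step
            # along each search direction is found by golden-section or Brent's method.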
            cgnet = algorithms.ConjugateGradient(
                connection=[
                    layers.SigmoidLayer(13),
                    layers.SigmoidLayer(50),
                    layers.OutputLayer(1),
                ],
                search_method=method_name,
                show_epoch=25,
                optimizations=[algorithms.LinearSearch],
            )
            cgnet.train(x_train, y_train, epochs=100)
            y_predict = cgnet.predict(x_test).round(1)

            error = rmsle(target_scaler.inverse_transform(y_test),
                          target_scaler.inverse_transform(y_predict))

            self.assertAlmostEqual(valid_error, error, places=5)
Example 13
    def test_linear_search(self):
        methods = [
            ('golden', 0.20976),
            ('brent', 0.21190),
        ]

        for method_name, valid_error in methods:
            np.random.seed(self.random_seed)

            dataset = datasets.load_boston()
            data, target = dataset.data, dataset.target

            data_scaler = preprocessing.MinMaxScaler()
            target_scaler = preprocessing.MinMaxScaler()

            x_train, x_test, y_train, y_test = train_test_split(
                data_scaler.fit_transform(data),
                target_scaler.fit_transform(target.reshape(-1, 1)),
                train_size=0.85
            )

            cgnet = algorithms.ConjugateGradient(
                connection=[
                    layers.SigmoidLayer(13),
                    layers.SigmoidLayer(50),
                    layers.OutputLayer(1),
                ],
                search_method=method_name,
                show_epoch=25,
                optimizations=[algorithms.LinearSearch],
            )
            cgnet.train(x_train, y_train, epochs=100)
            y_predict = cgnet.predict(x_test).round(1)

            error = rmsle(target_scaler.inverse_transform(y_test),
                          target_scaler.inverse_transform(y_predict))

            self.assertAlmostEqual(valid_error, error, places=5)
Example 14
    def test_pandas_for_bp(self):
        dataset = datasets.load_diabetes()
        target = dataset.target.reshape(-1, 1)

        input_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        n_features = dataset.data.shape[1]
        input_columns = ['column_' + str(i) for i in range(n_features)]

        pandas_data = pd.DataFrame(dataset.data, columns=input_columns)
        pandas_data['target'] = target_scaler.fit_transform(target)
        pandas_data[input_columns] = input_scaler.fit_transform(
            pandas_data[input_columns]
        )

        x_train, x_test, y_train, y_test = train_test_split(
            pandas_data[input_columns],
            pandas_data['target'],
            train_size=0.85
        )

        bpnet = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100
        )
        bpnet.train(x_train, y_train, epochs=1000)
        y_predict = bpnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4477, error, places=4)
Example 15
dataset = datasets.load_boston()  # assumed: Boston housing, matching the 13-unit input layer below
data, target = dataset.data, dataset.target

data_scaler = preprocessing.MinMaxScaler()
target_scaler = preprocessing.MinMaxScaler()

data = data_scaler.fit_transform(data)
target = target_scaler.fit_transform(target.reshape(-1, 1))

x_train, x_test, y_train, y_test = train_test_split(data,
                                                    target,
                                                    train_size=0.85)

cgnet = algorithms.ConjugateGradient(
    connection=[
        layers.SigmoidLayer(13),
        layers.SigmoidLayer(50),
        layers.OutputLayer(1),
    ],
    search_method='golden',
    show_epoch=25,
    verbose=True,
    optimizations=[algorithms.LinearSearch],
)

cgnet.train(x_train, y_train, x_test, y_test, epochs=100)
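# plot_errors() charts the errors recorded during the train() call above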
cgnet.plot_errors()

y_predict = cgnet.predict(x_test).round(1)
error = rmsle(target_scaler.inverse_transform(y_test),
              target_scaler.inverse_transform(y_predict))
print("RMSLE = {}".format(error))
Example 16
def scorer(network, X, y):
    result = network.predict(X)
    return rmsle(result, y)
Example 17
def scorer(network, X, y):
    result = network.predict(X)
    return functions.rmsle(result, y)
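
A scorer with this (estimator, X, y) signature matches the callable form accepted by scikit-learn's scoring= hooks (where an error such as rmsle would normally be negated, since scikit-learn maximizes scores), but it can also be called directly. A minimal usage sketch, assuming the GRNN and the diabetes split from the earlier examples are in scope:

# Hypothetical direct use of the scorer on a trained network
grnnet = algorithms.GRNN(std=0.1, verbose=False)
grnnet.train(x_train, y_train)
validation_error = scorer(grnnet, x_test, y_test)
print("Validation RMSLE = {}".format(validation_error))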
Example 18
dataset = datasets.load_boston()  # assumed: Boston housing, as in the related linear-search examples
data, target = dataset.data, dataset.target

data_scaler = preprocessing.MinMaxScaler()
target_scaler = preprocessing.MinMaxScaler()

data = data_scaler.fit_transform(data)
target = target_scaler.fit_transform(target.reshape(-1, 1))

x_train, x_test, y_train, y_test = train_test_split(
    data, target, train_size=0.85
)

cgnet = algorithms.ConjugateGradient(
    connection=[
        layers.SigmoidLayer(13),
        layers.SigmoidLayer(50),
        layers.OutputLayer(1),
    ],
    search_method='golden',
    show_epoch=25,
    optimizations=[algorithms.LinearSearch],
)

cgnet.train(x_train, y_train, x_test, y_test, epochs=100)
cgnet.plot_errors()

y_predict = cgnet.predict(x_test).round(1)
error = rmsle(target_scaler.inverse_transform(y_test),
              target_scaler.inverse_transform(y_predict))
print("RMSLE = {}".format(error))