Example 1
    def test_levenberg_marquardt(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        data_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        x_train, x_test, y_train, y_test = train_test_split(
            data_scaler.fit_transform(data),
            target_scaler.fit_transform(target),
            train_size=0.85)

        # Network
        lmnet = algorithms.LevenbergMarquardt(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            mu_increase_factor=2,
            mu=0.1,
            show_epoch=10,
            use_bias=False,
        )
        lmnet.train(x_train, y_train, epochs=100)
        y_predict = lmnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.4372, error, places=4)
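
Every regression test on this page measures quality with an rmsle helper that is imported from elsewhere in the test suite. As a minimal sketch, assuming plain NumPy arrays of non-negative values (the project's own implementation may differ in details such as clipping), root mean squared logarithmic error can be written as:

import numpy as np

def rmsle(actual, predicted):
    # Root mean squared logarithmic error; log1p(x) == log(1 + x), which
    # keeps the metric defined when a value is exactly zero.
    actual = np.asarray(actual, dtype=float)
    predicted = np.asarray(predicted, dtype=float)
    log_diff = np.log1p(predicted) - np.log1p(actual)
    return np.sqrt(np.mean(log_diff ** 2))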
Example 2
    def test_pandas_for_bp(self):
        dataset = datasets.load_diabetes()

        input_scaler = preprocessing.MinMaxScaler()
        target_scaler = preprocessing.MinMaxScaler()

        n_features = dataset.data.shape[1]
        input_columns = ['column_' + str(i) for i in range(n_features)]

        pandas_data = pd.DataFrame(dataset.data, columns=input_columns)
        pandas_data['target'] = target_scaler.fit_transform(dataset.target)
        pandas_data[input_columns] = input_scaler.fit_transform(
            pandas_data[input_columns])

        x_train, x_test, y_train, y_test = train_test_split(
            pandas_data[input_columns], pandas_data['target'], train_size=0.85)

        bpnet = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100,
        )
        bpnet.train(x_train, y_train, epochs=1000)
        y_predict = bpnet.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4477, error, places=4)
Example 3
    def test_hessian_diagonal(self):
        dataset = datasets.load_diabetes()
        data, target = dataset.data, dataset.target

        input_scaler = preprocessing.StandardScaler()
        target_scaler = preprocessing.StandardScaler()

        x_train, x_test, y_train, y_test = cross_validation.train_test_split(
            input_scaler.fit_transform(data),
            target_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8)

        nw = algorithms.HessianDiagonal(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(20),
                layers.OutputLayer(1),
            ],
            step=1.5,
            shuffle_data=False,
            verbose=False,
            min_eigenvalue=1e-10,
        )
        nw.train(x_train, y_train, epochs=10)
        y_predict = nw.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())

        self.assertAlmostEqual(0.5032, error, places=4)
Example 4
    def test_pipeline(self):
        dataset = datasets.load_diabetes()
        target_scaler = preprocessing.MinMaxScaler()

        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data,
            target_scaler.fit_transform(dataset.target),
            train_size=0.85)

        network = algorithms.Backpropagation(
            connection=[
                layers.SigmoidLayer(10),
                layers.SigmoidLayer(40),
                layers.OutputLayer(1),
            ],
            use_bias=True,
            show_epoch=100,
            verbose=False,
        )
        pipeline = Pipeline([
            ('min_max_scaler', preprocessing.MinMaxScaler()),
            ('backpropagation', network),
        ])
        pipeline.fit(x_train, y_train, backpropagation__epochs=1000)
        y_predict = pipeline.predict(x_test)

        error = rmsle(target_scaler.inverse_transform(y_test),
                      target_scaler.inverse_transform(y_predict).round())
        self.assertAlmostEqual(0.4481, error, places=4)
Example 5
    def test_quasi_newton_bfgs(self):
        x_train, x_test, y_train, y_test = self.data

        qnnet = algorithms.QuasiNewton(
            connection=[
                layers.SigmoidLayer(10, init_method='ortho'),
                layers.SigmoidLayer(20, init_method='ortho'),
                layers.OutputLayer(1)
            ],
            step=0.1,
            shuffle_data=True,
            show_epoch=20,
            verbose=False,
            update_function='bfgs',
            h0_scale=5,
            gradient_tol=1e-5,
        )
        qnnet.train(x_train, y_train, x_test, y_test, epochs=10)
        result = qnnet.predict(x_test).round()

        roc_curve_score = metrics.roc_auc_score(result, y_test)
        self.assertAlmostEqual(0.92, roc_curve_score, places=2)
Example 6
    def test_linear_search(self):
        methods = [
            ('golden', 0.20976),
            ('brent', 0.21190),
        ]

        for method_name, valid_error in methods:
            np.random.seed(self.random_seed)

            dataset = datasets.load_boston()
            data, target = dataset.data, dataset.target

            data_scaler = preprocessing.MinMaxScaler()
            target_scaler = preprocessing.MinMaxScaler()

            x_train, x_test, y_train, y_test = train_test_split(
                data_scaler.fit_transform(data),
                target_scaler.fit_transform(target.reshape(-1, 1)),
                train_size=0.85
            )

            cgnet = algorithms.ConjugateGradient(
                connection=[
                    layers.SigmoidLayer(13),
                    layers.SigmoidLayer(50),
                    layers.OutputLayer(1),
                ],
                search_method=method_name,
                show_epoch=25,
                optimizations=[algorithms.LinearSearch],
            )
            cgnet.train(x_train, y_train, epochs=100)
            y_predict = cgnet.predict(x_test).round(1)

            error = rmsle(target_scaler.inverse_transform(y_test),
                          target_scaler.inverse_transform(y_predict))

            self.assertAlmostEqual(valid_error, error, places=5)
Example 7
dataset = datasets.load_boston()
data, target = dataset.data, dataset.target

data_scaler = preprocessing.MinMaxScaler()
target_scaler = preprocessing.MinMaxScaler()

data = data_scaler.fit_transform(data)
target = target_scaler.fit_transform(target)

x_train, x_test, y_train, y_test = train_test_split(data,
                                                    target,
                                                    train_size=0.85)

cgnet = algorithms.ConjugateGradient(
    connection=[
        layers.SigmoidLayer(13),
        layers.SigmoidLayer(50),
        layers.OutputLayer(1),
    ],
    search_method='golden',
    show_epoch=25,
    verbose=True,
    optimizations=[algorithms.LinearSearch],
)

cgnet.train(x_train, y_train, x_test, y_test, epochs=100)
cgnet.plot_errors()

y_predict = cgnet.predict(x_test).round(1)
error = rmsle(target_scaler.inverse_transform(y_test),
              target_scaler.inverse_transform(y_predict))
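
Because this snippet runs as a standalone script rather than a unittest assertion, one might finish it by reporting the hold-out error explicitly (the print format below is illustrative):

print("RMSLE on the test set: {:.4f}".format(error))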
Example 8
def get_connection():
    """ Generate new connections every time when we call it """
    input_layer = layers.SigmoidLayer(2, weight=default_weight.copy())
    output_layer = layers.OutputLayer(1)
    return input_layer > output_layer
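
The > operator chains the two layers into a single connection object, and because get_connection() builds fresh layer instances on every call, each network trained on it gets its own copy of default_weight (defined elsewhere in the test module). A short usage sketch, assuming a toy dataset with two input features and one target column:

import numpy as np

# Toy data: two input features, one target column (illustrative only).
x_train = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]])
y_train = np.array([[0], [1], [1], [0]])

# Each call to get_connection() yields an independent network.
bpnet = algorithms.Backpropagation(get_connection(), step=0.1, verbose=False)
bpnet.train(x_train, y_train, epochs=100)
y_predict = bpnet.predict(x_train)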
Example 9
    def test_handle_errors(self):
        networks = [
            algorithms.Backpropagation((1, 20, 1), step=0.2),
            algorithms.Backpropagation((1, 20, 1), step=0.2),
        ]

        with self.assertRaises(ValueError):
            # Invalid network (not Backpropagation)
            ensemble.MixtureOfExperts(
                networks=networks + [algorithms.GRNN()],
                gating_network=algorithms.Backpropagation(
                    layers.SigmoidLayer(1) > layers.OutputLayer(3), ))

        with self.assertRaises(ValueError):
            # Invalid number of outputs in third network
            ensemble.MixtureOfExperts(
                networks=networks +
                [algorithms.Backpropagation((1, 20, 2), step=0.2)],
                gating_network=algorithms.Backpropagation(
                    layers.SigmoidLayer(1) > layers.OutputLayer(3), ))

        with self.assertRaises(ValueError):
            # Invalid gating network output layer size
            ensemble.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.Backpropagation(
                    layers.SoftmaxLayer(1) > layers.OutputLayer(1), ))

        with self.assertRaises(ValueError):
            # Invalid gating network input layer
            ensemble.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.Backpropagation(
                    layers.SigmoidLayer(1) > layers.OutputLayer(2), ))

        with self.assertRaises(ValueError):
            # Invalid gating network output layer
            ensemble.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.Backpropagation(
                    layers.SoftmaxLayer(1) > layers.RoundOutputLayer(2)))

        with self.assertRaises(ValueError):
            # Invalid network error function
            ensemble.MixtureOfExperts(
                networks=networks + [
                    algorithms.Backpropagation(
                        (1, 20, 1), step=0.2, error=rmsle)
                ],
                gating_network=algorithms.Backpropagation(
                    layers.SigmoidLayer(1) > layers.OutputLayer(3), ))

        with self.assertRaises(ValueError):
            moe = ensemble.MixtureOfExperts(
                # Invalid gating error function
                networks=networks,
                gating_network=algorithms.Backpropagation(
                    layers.SoftmaxLayer(1) > layers.OutputLayer(2),
                    error=rmsle))

        moe = ensemble.MixtureOfExperts(
            # Valid configuration; shape errors are checked in train() below
            networks=networks,
            gating_network=algorithms.Backpropagation(
                layers.SoftmaxLayer(1) > layers.OutputLayer(2)))
        with self.assertRaises(ValueError):
            # Wrong number of train input features
            moe.train(np.array([[1, 2]]), np.array([[0]]))

        with self.assertRaises(ValueError):
            # Wrong number of train output features
            moe.train(np.array([[1]]), np.array([[0, 0]]))
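
For contrast with the failure cases above, a configuration that the same checks should accept pairs single-output Backpropagation experts with a softmax gating network whose output size equals the number of experts. A hedged sketch with placeholder one-feature data:

import numpy as np

# Two experts, each mapping one input feature to one output value.
networks = [
    algorithms.Backpropagation((1, 20, 1), step=0.2, verbose=False),
    algorithms.Backpropagation((1, 20, 1), step=0.2, verbose=False),
]

# Gating network: softmax layer feeding one output unit per expert.
moe = ensemble.MixtureOfExperts(
    networks=networks,
    gating_network=algorithms.Backpropagation(
        layers.SoftmaxLayer(1) > layers.OutputLayer(2),
        verbose=False,
    ),
)

# Shapes match what the experts expect: one feature in, one value out.
x_train = np.array([[0.1], [0.5], [0.9]])
y_train = np.array([[0.2], [0.6], [1.0]])
moe.train(x_train, y_train)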