Example No. 1
    def predict(self, X):
        """Compute predictions for the given samples.

        :param X: Features values
        :return: Target values
        """
        validated = self._check_data(X)
        design = add_intercept(validated)
        return super().predict(design)
Example No. 2
    def fit(self, X, y):
        """Fit the model to the training data.

        Uses the configured optimizer when one is set; otherwise the
        parameters are obtained in closed form via the normal equation.

        :param X: Features values
        :param y: Target values
        """
        features, targets = self._check_fit_data(X, y)
        design = add_intercept(features)
        if self._optimizer is None:
            self._params = self._normal_equation(design, targets)
        else:
            super().fit(design, targets)
Example No. 3
    def predict(self, X):
        """Predict class labels for the given samples.

        :param X: Features values
        :return: Target values
        """
        data = self._check_data(X)
        raw = super().predict(add_intercept(data))

        if len(self._labels) == 2:
            # Binary case: threshold the single score column at 0.5.
            indices = (raw >= 0.5) * 1
        else:
            # Multiclass case: take the highest-scoring column per sample.
            indices = np.argmax(raw, axis=1)
        return self._labels.take(indices).flatten()
Example No. 4
    def fit(self, X, y):
        """Fit the classifier to the training data.

        :param X: Features values
        :param y: Target values
        """
        X, y = self._check_fit_data(X, y)
        design = add_intercept(X)
        self._labels, encoded = one_hot(y)

        if self._use_softmax and len(self._labels) > 2:
            # Softmax path: optimize all class weights jointly as one
            # flattened parameter vector, then restore the matrix shape.
            seed = np.zeros((design.shape[1], encoded.shape[1]))
            flat = self._optimize_params(design, encoded, unroll((seed,)))
            self._params = undo_unroll(flat, (seed.shape,))[0]
        else:
            # One-vs-rest path: fit a separate weight vector against each
            # one-hot column, starting from fresh zeros every time.
            self._params = np.apply_along_axis(
                lambda column: self._optimize_params(
                    design, column, np.zeros(design.shape[1])),
                0, encoded)
    result = neural_network._init_weights(X, y)

    for i in range(len(expected_shapes)):
        # Verify that weights matrix has correct shape
        assert_array_equal(result[i].shape, expected_shapes[i])
        # Check that all weights are unique
        assert result[i].size == np.unique(result[i]).size


# Shared test input: every pair of binary values.
X = [[0, 0], [0, 1], [1, 0], [1, 1]]
activation_function = Sigmoid

# A simple neural network with a single output that calculates XNOR expression for two binary inputs
weights_so = (
    np.asarray([[-30, 20, 20], [10, -20, -20]]),
    np.asarray([[-10, 20, 20]]),
)
expected_activations_so = (
    add_intercept(X),
    add_intercept([[0, 1], [0, 0], [0, 0], [1, 0]]),
    [[1], [0], [0], [1]],
)

# A simple neural network with multiple outputs that calculates XNOR and XOR expression for two binary inputs
weights_mo = (
    weights_so[0],
    np.asarray([[-10, 20, 20], [10, -20, -20]]),
)
expected_activations_mo = (
    expected_activations_so[0],
    expected_activations_so[1],
    [[1, 0], [0, 1], [0, 1], [1, 0]],
)


@pytest.mark.parametrize(
    "X, weights, output_activation, expected_activations",
Example No. 6
def test_add_intercept():
    """add_intercept must prepend a column of ones to every row."""
    # Input matrix: A was never defined in this snippet; the expected
    # output below fixes exactly what it has to be.
    A = [[1, 2, 3], [1, 2, 3], [1, 2, 3]]
    A_intercept = [[1, 1, 2, 3], [1, 1, 2, 3], [1, 1, 2, 3]]

    B = add_intercept(A)

    assert_array_equal(B, A_intercept)