    def __call__(self, X, y, *args, **kwargs):
        if self.function is None or self.gradient is None:
            raise NotBuilt("you must build the optimizer before calling it")

        if not check_dataset_consistency(X, y):
            raise InvalidInput(
                "the features set and target set must have as many rows")

        # shuffle once so the mini-batches are drawn in a random order
        X, y = shuffle_dataset(X, y)
        m = y.shape[0]

        # one parameter per feature column, stored as a column vector
        self.parameters = np.zeros((X.shape[1], 1))
        for _ in range(self.iterations):
            # one gradient step per mini-batch of `batch_size` consecutive rows
            for i in range(0, m, self.batch_size):
                self.parameters = self.parameters - (
                    self.learning_rate / m) * self.gradient(
                        X[i:i + self.batch_size],
                        y[i:i + self.batch_size],
                        self.parameters,
                        *args,
                        **kwargs,
                    )

            # optionally record the value of the cost function after each epoch
            if self.is_history_enabled:
                self.history.append((1 / self.batch_size) *
                                    self.function(X, y, self.parameters))

        return self.parameters
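
For context, here is a self-contained sketch of the same mini-batch update rule, assuming the callables follow the signatures used above (`gradient(X_batch, y_batch, theta)` and `function(X, y, theta)`); the least-squares cost and the toy data are illustrative assumptions, not part of the original class:

import numpy as np

def lstsq_gradient(X, y, theta):
    # gradient of the (unscaled) squared-error cost: X^T (X theta - y)
    return X.T @ (X @ theta - y)

def lstsq_cost(X, y, theta):
    # (unscaled) squared-error cost
    return float(np.sum((X @ theta - y) ** 2) / 2)

rng = np.random.default_rng(0)
X = np.concatenate((np.ones((100, 1)), rng.normal(size=(100, 2))), axis=1)
y = X @ np.array([[1.0], [2.0], [-3.0]]) + 0.01 * rng.normal(size=(100, 1))

m, batch_size, learning_rate, iterations = y.shape[0], 20, 0.1, 200
theta = np.zeros((X.shape[1], 1))
history = []
for _ in range(iterations):
    for i in range(0, m, batch_size):
        theta -= (learning_rate / m) * lstsq_gradient(
            X[i:i + batch_size], y[i:i + batch_size], theta)
    history.append(lstsq_cost(X, y, theta) / m)  # mean cost per epoch

print(theta.ravel())  # roughly [1, 2, -3]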
Example #2
    def fit(self, X, y):
        """Fit the model."""
        X = features_reshape(X)
        if not check_dataset_consistency(X, y):
            raise InvalidInput("the features set and target set must have as many rows")

        if self.standardize is not None:
            X = self.standardize(X)
        # prepend a column of ones so the first parameter acts as the intercept
        X = np.concatenate((np.ones((X.shape[0], 1)), X), axis=1)
        self.optimizer(X, y)
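
The `np.concatenate` call above prepends a column of ones to the design matrix so that the first learned parameter plays the role of the intercept; a small sketch of the effect:

import numpy as np

X = np.array([[2.0, 3.0],
              [4.0, 5.0]])
print(np.concatenate((np.ones((X.shape[0], 1)), X), axis=1))
# [[1. 2. 3.]
#  [1. 4. 5.]]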
Example #3
    def fit(self, X, y):
        """Fit the model."""
        X = features_reshape(X)
        if not check_dataset_consistency(X, y):
            raise InvalidInput(
                "the features set and target set must have as many rows")

        if self.standardize is not None:
            X = self.standardize(X)
        X = np.concatenate((np.ones((X.shape[0], 1)), X), axis=1)

        self.labels = np.unique(y)
        n_labels = np.size(self.labels)
        if n_labels < 2:
            raise InvalidInput(
                "target must have at least two different classes")
        elif n_labels == 2:
            # binary problem: fit directly on the original target vector
            self.optimizer(X, y)
        else:
            # multiclass problem: broadcasting y against the label vector gives
            # a one-hot target matrix with one binary column per class
            # (sketched after this snippet)
            self.optimizer(X, (y == self.labels).astype(int))
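
The one-hot encoding in the multiclass branch comes from NumPy broadcasting: comparing the column vector `y` against the sorted label vector returned by `np.unique` yields one binary column per class, so the optimizer fits one one-vs-rest problem per label. A small illustration (the labels and the (m, 1) shape of `y` are assumptions made for the example):

import numpy as np

y = np.array([["cat"], ["dog"], ["cat"], ["bird"]])
labels = np.unique(y)                 # ['bird', 'cat', 'dog']
print((y == labels).astype(int))
# [[0 1 0]
#  [0 0 1]
#  [0 1 0]
#  [1 0 0]]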
Example #4
def test_check_dataset_consistency_invalid():
    """Test of `check_dataset_consistency` with invalid input ."""
    X = np.arange(9).reshape(3, 3)  # 3 rows of features
    y = np.arange(4).reshape(2, 2)  # only 2 rows of targets

    assert not check_dataset_consistency(X, y)
Example #5
def test_check_dataset_consistency_valid():
    """Test of `check_dataset_consistency` with valid input."""
    X = np.arange(9).reshape(3, 3)
    y = np.arange(3).reshape(3, 1)

    assert check_dataset_consistency(X, y)
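
These two tests only exercise the row-count contract, so a minimal implementation consistent with them could look like the sketch below; the library's actual `check_dataset_consistency` is not shown in these snippets, so this is an assumption:

def check_dataset_consistency(X, y):
    # plausible sketch: the datasets are consistent when the feature set and
    # the target set have the same number of rows
    return X.shape[0] == y.shape[0]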