def test_LinearRegression_warm_start(self):
        """...Test LinearRegression warm start
        """
        # Use the fixture through ``self`` like every other test in this
        # class, instead of hard-coding the test-class name.
        X, y, weights0, intercept0 = self.get_train_data()

        fit_intercepts = [True, False]
        # Exercise every registered solver, with and without an intercept.
        cases = itertools.product(LinearRegression._solvers.keys(),
                                  fit_intercepts)

        for solver, fit_intercept in cases:
            solver_kwargs = {
                'solver': solver,
                'max_iter': 2,
                'fit_intercept': fit_intercept,
                'warm_start': True,
                'tol': 0
            }

            def stacked_coeffs(lrn):
                # Full coefficient vector as seen by the solver objective
                # (weights, plus the intercept when one is fitted).
                if fit_intercept:
                    return np.hstack((lrn.weights, lrn.intercept))
                return lrn.weights

            learner = LinearRegression(**solver_kwargs)
            learner.fit(X, y)
            coeffs_1 = stacked_coeffs(learner)

            learner.fit(X, y)
            coeffs_2 = stacked_coeffs(learner)
            # With warm start the second fit resumes from the first fit's
            # coefficients, so the objective must have decreased further.
            self.assertLess(
                learner._solver_obj.objective(coeffs_2),
                learner._solver_obj.objective(coeffs_1))
 def test_LinearRegression_fit(self):
     """...Test LinearRegression fit with different solvers and penalties
     """
     fit_intercepts = [False, True]
     n_samples = 2000
     n_features = 20
     X, y, weights0, _ = self.get_train_data(
         n_samples=n_samples, n_features=n_features, fit_intercept=False)
     # The intercept is added to the labels by hand below, so the same
     # feature matrix serves both the intercept and no-intercept cases.
     intercept0 = -1
     # The index produced by the original ``enumerate`` was never used, so
     # iterate the cartesian product of cases directly.
     for solver, penalty, fit_intercept in product(
             LinearRegression._solvers.keys(),
             LinearRegression._penalties.keys(), fit_intercepts):
         if fit_intercept:
             y_ = y + intercept0
         else:
             y_ = y.copy()
         if penalty == 'binarsity':
             # binarsity additionally requires the feature block structure.
             learner = LinearRegression(
                 verbose=False, fit_intercept=fit_intercept, solver=solver,
                 penalty=penalty, tol=1e-10, blocks_start=[0],
                 blocks_length=[1])
         else:
             learner = LinearRegression(
                 verbose=False, fit_intercept=fit_intercept, solver=solver,
                 penalty=penalty, tol=1e-10)
         learner.fit(X, y_)
         # Root-mean-square deviation from the generating weights.
         err = norm(learner.weights - weights0) / n_features ** 0.5
         self.assertLess(err, 3e-2)
         if fit_intercept:
             self.assertLess(abs(learner.intercept - intercept0), 3e-2)
 def test_score(self):
     """...Test LinearRegression score
     """
     X_train, y_train, _, _ = self.get_train_data(n_samples=2000,
                                                  n_features=12)
     learner = LinearRegression(random_state=32789, tol=1e-9)
     learner.fit(X_train, y_train)
     # Score on an independent draw from the same generating process; the
     # expected value is pinned by the fixed random_state above.
     X_test, y_test, _, _ = self.get_train_data(n_samples=200,
                                                n_features=12)
     self.assertAlmostEqual(
         learner.score(X_test, y_test), 0.793774, places=4)
 def test_predict(self):
     """...Test LinearRegression predict
     """
     X_train, y_train, _, _ = self.get_train_data(n_samples=200,
                                                  n_features=12)
     learner = LinearRegression(random_state=32789, tol=1e-9)
     learner.fit(X_train, y_train)
     # The labels of the test draw are not needed for prediction, so do
     # not bind them to a name (the original ``y_test`` was unused).
     X_test, _, _, _ = self.get_train_data(n_samples=5, n_features=12)
     # Expected predictions pinned for the fixed random_state above.
     y_pred = np.array([0.084, -1.4276, -3.1555, 2.6218, 0.3736])
     np.testing.assert_array_almost_equal(
         learner.predict(X_test), y_pred, decimal=4)