from sklearn.linear_model import Lasso, LinearRegression, Ridge
from sklearn.metrics import r2_score


def main():
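    # Compare scikit-learn's Ridge (L2), Lasso (L1) and plain LinearRegression
    # against the custom MyLinearRegression implementation.
    # X_train, y_train, X_test, y_test and MyLinearRegression are assumed to be
    # defined/imported elsewhere in the original module.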
    ridge_estimator = Ridge(alpha=0.1)
    ridge_estimator.fit(X_train, y_train)
    print(ridge_estimator.intercept_, ridge_estimator.coef_)

    lasso_estimator = Lasso(alpha=0.01)
    lasso_estimator.fit(X_train, y_train)
    print(lasso_estimator.intercept_, lasso_estimator.coef_)

    sklin_estimator = LinearRegression()
    sklin_estimator.fit(X_train, y_train)
    print(sklin_estimator.intercept_, sklin_estimator.coef_)

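    # The custom implementation stores its learned weights (bias first) in .w;
    # the l2=0.01 argument below adds L2 (ridge-style) regularization so the
    # result can be compared with the Ridge fit above.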
    my_estimator = MyLinearRegression(n_epochs=500)
    my_estimator.fit(X_train, y_train)
    print(my_estimator.w)

    myl2_estimator = MyLinearRegression(n_epochs=500, l2=0.01)
    myl2_estimator.fit(X_train, y_train)
    print(myl2_estimator.w)

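    # R² of each of the five models on the training set, then on the held-out test set.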
    print("train scores")
    print(r2_score(y_train, ridge_estimator.predict(X_train)))
    print(r2_score(y_train, lasso_estimator.predict(X_train)))
    print(r2_score(y_train, sklin_estimator.predict(X_train)))
    print(r2_score(y_train, my_estimator.predict(X_train)))
    print(r2_score(y_train, myl2_estimator.predict(X_train)))

    print("test scores")
    print(r2_score(y_test, ridge_estimator.predict(X_test)))
    print(r2_score(y_test, lasso_estimator.predict(X_test)))
    print(r2_score(y_test, sklin_estimator.predict(X_test)))
    print(r2_score(y_test, my_estimator.predict(X_test)))
    print(r2_score(y_test, myl2_estimator.predict(X_test)))
Example #2
    def test_standardization_is_used(self):
        X = numpy.asarray([[2., 3.], [0., 1.], [2., 0.], [0., 0.]])
        y = numpy.asarray([1., 5., 3., 1.])

        estimator = MyLinearRegression(standardize=True)
        estimator.fit(X, y)

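        # Per-feature means of X are (2+0+2+0)/4 = 1 and (3+1+0+0)/4 = 1,
        # so the fitted scaler should report mean_ == [1., 1.].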
        assert_array_equal(estimator.standard_scaler_x.mean_, [1., 1.])

        y_pred = estimator.predict(X)

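        # Corrupting the stored feature means must change the predictions
        # if predict() really routes its input through the scaler.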
        estimator.standard_scaler_x.mean_ = [23., 23.]
        y_pred_unscaled = estimator.predict(X)
        self.assertNotEqual(list(y_pred), list(y_pred_unscaled))
Example #3
    def test_standardize_y(self):
        X = numpy.asarray([[1.63295], [-1.63295], [0.]])
        y = numpy.asarray([2., -2., 0.])
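        # X and y both standardize to roughly [1.2247, -1.2247, 0.], so in the
        # scaled space the fit should be intercept 0 and slope 1, and the
        # predictions should map back to the original targets.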

        estimator = MyLinearRegression(standardize=True)
        estimator.fit(X, y)

        assert_array_almost_equal(estimator.w, [0., 1.], decimal=3)
        assert_array_almost_equal(estimator.predict(X), [2., -2., 0.],
                                  decimal=3)
Example #4
    def test_example_two_features(self):
        X = numpy.asarray([[2., 2.], [0., 2.], [2., 0.], [0., 0.]])
        y = numpy.asarray([1., -1., 3., 1.])
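        # The targets satisfy y = 1 + x1 - x2 exactly, so gradient descent
        # should recover weights close to [1, 1, -1] (bias first).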

        estimator = MyLinearRegression(learning_rate=0.2)
        estimator.fit(X, y)

        self.assertAlmostEqual(estimator.w[0], 1, places=1)
        self.assertAlmostEqual(estimator.w[1], 1, places=1)
        self.assertAlmostEqual(estimator.w[2], -1, places=1)

        y_pred = estimator.predict(X)
        self.assertAlmostEqual(y_pred[0], 1., places=1)
Example #5
    0.5430171847213586, 0.6838572594876159, 0.5257790543690498,
    0.749720166273106, 0.7417230143992304, 0.7170392536658252,
    0.6054873002426322, 0.6494912378903008, 0.7836036247312703,
    0.5660181206880279, 0.6611925537901934, 0.677418202867866,
    0.6212332444014119, 0.755211481708266, 0.6574209206631763,
    0.7961329669363469, 0.6780963740303935, 0.5777549618568788,
    0.6029694914018995, 0.7055898454746773, 0.7615936253567677,
    0.6509820273225375, 0.6921296101111231, 0.4965640242465189,
    0.5969105590529076, 0.7922304500021877
]

Xs = numpy.asarray(xs)
Xs = Xs.reshape((len(xs), 1))
Ys = numpy.asarray(ys)
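# Fit the custom estimator and the scikit-learn baseline on the same 1-D data
# and compare their mean squared errors. my_estimator and sk_estimator are
# assumed to have been constructed earlier in the original script (the top of
# this snippet, including xs and the start of ys, is truncated).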
my_estimator.fit(Xs, ys)
my_PYs = my_estimator.predict(Xs)
sk_estimator.fit(Xs, ys)
sk_PYs = sk_estimator.predict(Xs)

my_squared_error = mean_squared_error(Ys, my_PYs)
print("my squared errors", my_squared_error)
scikit_quared_error = mean_squared_error(Ys, sk_PYs)
print("scikit squarred errors", scikit_quared_error)

assert my_squared_error < 0.006

from matplotlib import pyplot as plt
plt.subplot(3, 1, 1)
plt.hist(Ys)
plt.xlim((0.3, 0.9))
plt.subplot(3, 1, 2)
Example #6
    return y


lin_fun = get_y_fun(a, b, c)
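# get_y_fun(a, b, c) is assumed to return a callable lin_fun(x, noise) that
# evaluates a linear function of the 2-D point x plus the given noise term;
# its definition (and a, b, c) is truncated above.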

n = 1000
range_points = 1
range_plot = 1.1

sigma = 0.05

X = range_points * 2 * (np.random.rand(n, 2) - 0.5)
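# n random 2-D points drawn uniformly from the square
# [-range_points, range_points] x [-range_points, range_points].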

y = np.asfarray([lin_fun(x, sigma * np.random.normal()) for x in X])

print(X[:10])
print(y[:10])

clf = MyLinearRegression()
clf.fit(X, y)

y_pred = clf.predict(X)

from matplotlib import pyplot
pyplot.hist(y)
pyplot.hist(y_pred)
pyplot.draw()

pyplot.show()
Example #7
import numpy as np
from mylinearregression import MyLinearRegression as MyLR
x = np.array([[12.4956442], [21.5007972], [31.5527382], [48.9145838],
              [57.5088733]])
y = np.array([[37.4013816], [36.1473236], [45.7655287], [46.6793434],
              [59.5585554]])

lr1 = MyLR([2, 0.7])
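# MyLR is initialized with theta = [2, 0.7] (intercept 2, slope 0.7), so
# predict(x) should return 2 + 0.7 * x, matching the output below.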

# Example 0.0:
print(lr1.predict(x))
# Output:
# array([[10.74695094],
#        [17.05055804],
#        [24.08691674],
#        [36.24020866],
#        [42.25621131]])

# Example 0.1:
print(lr1.cost_elem_(lr1.predict(x), y))
# Output:
# array([[77.72116511],
#        [49.33699664],
#        [72.38621816],
#        [37.29223426],
#        [78.28360514]])

# Example 0.2:
print(lr1.cost_(lr1.predict(x), y))
# Output:
# 315.0202193084312
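# Note: cost_ equals the sum of the per-sample values returned by
# cost_elem_ above (77.72 + 49.34 + 72.39 + 37.29 + 78.28 ≈ 315.02).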