Example #1
0
def test_linear_regression():
    """
    Fit a linear regression on a noisy straight line and plot the fit.

    Test set "stolen" from scikit learn: points on the line
    y = 2 + 0.5 * x with added gaussian noise.
    """
    low, high = -5, 5
    count = 100

    # Sample grid and build a (count, 1) design matrix from it.
    grid = np.linspace(low, high, count)
    X = grid.reshape(-1, 1)

    # Targets: the true line plus standard-normal noise.
    noise = np.random.randn(count, 1).ravel()
    Y = np.array(2 + 0.5 * grid + noise)

    beta, u = linear_regression(X, Y)

    # Overlay the fitted line on the noisy samples.
    plt.scatter(X, Y, color='black')
    plt.plot(X, np.dot(X, beta) + u, linewidth=1)
    plt.show()
Example #2
0
def plot(X, Y, XtA, title="ClassificationA.png"):
    """
    Scatter the two classes of X and draw the decision boundary of three
    classifiers (LDA, logistic regression, linear regression), then save
    the figure to `title` and display it.

    X     -- (n, 2) training points
    Y     -- class labels (0 or 1), one per row of X
    XtA   -- test points; kept for interface compatibility (the original
             computed predictions on it but never used them)
    title -- output image file name
    """
    fig = plt.figure()
    colors = ['#4EACC5', '#FF9C34', '#aaaaaa', '#4E9A06', '#00465F', "#7E2007"]
    ax = fig.add_subplot(1, 1, 1)

    # Scatter each class with its own color; flatten the boolean mask
    # to 1-D so it can index rows of X.
    for label, color in ((0, colors[0]), (1, colors[1])):
        members = (Y == label).reshape(-1)
        ax.plot(X[members, 0], X[members, 1],
                'w', markerfacecolor=color, marker='.')

    # Common x-grid for the three boundary lines.
    xs = np.linspace(X.min(), X.max(), 100)
    x_beta = [[i] for i in xs]

    # LDA boundary: solve beta[0]*x + beta[1]*y + u = 0 for y.
    beta, u = classification.LDA(X, Y)
    ax.plot(x_beta, (- u - beta[0] * xs) / beta[1],
            color=colors[3], linewidth=1)

    # Logistic regression boundary (same linear form).
    beta, u = classification.logistic_regression(X, Y, verbose=False)
    ax.plot(x_beta, (- u - beta[0] * xs) / beta[1],
            color=colors[4], linewidth=1)

    # Linear regression: the class threshold is 0.5 instead of 0.
    beta, u = classification.linear_regression(X, Y)
    ax.plot(x_beta, (0.5 - u - beta[0] * xs) / beta[1],
            color=colors[5], linewidth=1)

    # One label per plotted artist (2 scatters + 3 lines).  The original
    # tuple had a spurious leading 'unknown' entry that shifted every
    # label onto the wrong artist and dropped 'linear regression'.
    labels = ('label 0', 'label 1', 'LDA model',
              'logistic regression', 'linear regression')
    legend = plt.legend(labels, loc=(0.9, .95), labelspacing=0.1)
    plt.setp(legend.get_texts(), fontsize='small')

    # Save BEFORE show(): after show() returns the figure may already be
    # released, so the original savefig-after-show wrote a blank image.
    plt.savefig(title)
    plt.show()
Example #3
0
print "****************"
print

beta, u = classification.LDA(XA, YA)
YtcA = classification.logistic_regression_predict(XtA, beta, u)
erreur = classification.error(YtcA, YtA)

print "Jeu de test A - Modèle LDA: erreur %s" % erreur

beta, u = classification.logistic_regression(XA, YA, verbose=False)
YtcA = classification.logistic_regression_predict(XtA, beta, u)
erreur = classification.error(YtcA, YtA)

print "Jeu de test A - Regression logisitique: erreur %s" % erreur

beta, u = classification.linear_regression(XA, YA)
YtcA = classification.linear_regression_predict(XtA, beta, u)
erreur = classification.error(YtcA, YtA)

print "Jeu de test A - Regression linéaire: erreur %s" % erreur

# Jeu de données B
print
print
print "Jeu de données B"
print "****************"
print

beta, u = classification.LDA(XB, YB)
YtcB = classification.logistic_regression_predict(XtB, beta, u)
erreur = classification.error(YtcB, YtB)
Example #4
0
import numpy as np

from mpl_toolkits.axes_grid.axislines import SubplotZero
import matplotlib.pyplot as plt


from classification import linear_regression
from utils import load_data

# Script settings; neither name is used in this visible chunk —
# presumably consumed by code later in the file (TODO confirm).
verbose = True
max_iter = 500

# Load the training set: X are the sample points, Y the labels.
# (Exact shapes depend on utils.load_data — not visible here.)
X, Y = load_data('classificationA.train')

# Fit the model: beta are the weights, u the intercept.
beta, u = linear_regression(X, Y)

# Let's plot the result
fig = plt.figure(1)
colors = ['#4EACC5', '#FF9C34', '#4E9A06']
my_members = Y == 0
# Force the boolean mask to 1-D so it can index rows of X.
my_members.shape = (my_members.shape[0])
# SubplotZero draws axis lines crossing at the origin.
ax = SubplotZero(fig, 111)
fig.add_subplot(ax)

# Show the x=0 / y=0 axis lines, styled with arrow tips...
for direction in ["xzero", "yzero"]:
    ax.axis[direction].set_axisline_style("-|>")
    ax.axis[direction].set_visible(True)

# ...and hide the default rectangular frame.
for direction in ["left", "right", "bottom", "top"]:
    ax.axis[direction].set_visible(False)