eps = 1.
elif algorithm == 'logistic_regression':
    eps = 5.
elif algorithm == 'polynomial_regression':
    X = polynomial_transform(X)
    eps = 100

nSamples = X.shape[0]
plt.ion()

# Linear model parameters, both initialised to zero.
w = np.zeros(X.shape[1])
b = 0.

for it in range(100):
    # Raw linear scores for every sample.
    scores = np.dot(X, w) + b
    if algorithm == 'perceptron':
        # Hard threshold: the boolean outputs double as class labels.
        T = scores > 0
        C = T
    else:
        # Sigmoid activation gives probabilities; threshold at 0.5 for labels.
        T = sigm(scores)
        C = T > .5

    # Batch gradient step, averaged over the dataset.
    residual = Y - T
    w += eps * np.dot(residual, X) / nSamples
    b += eps * np.mean(residual)

    # Draw the decision boundary.
    plt.clf()
    plt.title('p = ' + str(X.shape[1]) + ', Iteration = ' + str(it) + ', Error = ' + str(np.mean(Y != C)))
    decision_boundary(Xorig, Y, w, b)
# Example #2
import numpy as np
import matplotlib.pyplot as plt
from decision_boundary import *

# Load the dataset
# Load the dataset
with np.load('./data.npz') as data:
    X = data['X']
    Y = data['Y']

nSamples = X.shape[0]
# All-zero starting point for the linear classifier.
w = np.zeros(X.shape[1])
b = 0.
plt.ion()

for step in range(100):
    # Predicted outputs: sign of the linear score (-1, 0 or +1).
    T = np.sign(X.dot(w) + b)
    err = Y - T
    # Averaged gradient-style update on weights and bias.
    w += .01 * err.dot(X) / nSamples
    b += .01 * np.mean(err)

    # Redraw the data and the current decision boundary.
    plt.clf()
    plt.scatter(X[:, 0], X[:, 1], c=Y, cmap='winter')
    plt.axis([-2, 4, -5, 3])
    plt.title('Iteration = ' + str(step) + ', Error = ' + str(np.mean(Y != T)))
    plt.axis('off')
    decision_boundary(w, b)
# Example #3
# Stochastic Average Gradient (SAG)-style training: remember the last-seen
# gradient of every sample and step along the running average of all of them.
w = np.zeros(X.shape[1])
b = 0.

T = np.zeros(nSamples)  # last predicted probability per sample
C = np.zeros(nSamples)  # last hard 0/1 prediction per sample
sum_gradients = np.zeros(X.shape[1])  # running sum of gradients[n] * X[n]
gradients = np.zeros(nSamples)        # last residual Y[n] - T[n] per sample

for i in range(100 * nSamples):
    # Draw an example at random
    n = np.random.randint(nSamples)

    # Predicted outputs for that example.
    T[n] = sigm(np.dot(X[n], w) + b)
    C[n] = (T[n] > .5)

    # Replace sample n's stale contribution with its fresh gradient.
    sum_gradients -= gradients[n] * X[n]
    gradients[n] = Y[n] - T[n]
    sum_gradients += gradients[n] * X[n]

    w += eps * sum_gradients / nSamples
    # FIX: the bias step must use the running average of ALL stored residuals,
    # mirroring sum_gradients / nSamples for w. The original
    # np.mean(gradients[n]) is the mean of a scalar, i.e. just gradients[n],
    # which makes the bias a plain SGD step while the weights follow SAG.
    b += eps * np.mean(gradients)

    if i % nSamples == 0:
        # Draw the decision boundary once per nSamples updates ("epoch").
        plt.clf()
        # FIX: integer division — under Python 3, i / nSamples would render
        # the epoch counter as a float ('1.0' instead of '1').
        plt.title('p = ' + str(X.shape[1]) + ', Iteration = ' +
                  str(i // nSamples) + ', Error = ' + str(np.mean(Y != C)))
        decision_boundary(Xorig, Y, w, b)
# Example #4
import numpy as np
import matplotlib.pyplot as plt
from decision_boundary import *

# Load the dataset
# Load the dataset
with np.load('./data.npz') as data:
    X = data['X']
    Y = data['Y']

nSamples, nFeatures = X.shape
# Start from the all-zero linear classifier.
w = np.zeros(nFeatures)
b = 0.
plt.ion()

for iteration in range(100):
    # Predicted outputs via the sign of the linear score.
    T = np.sign(np.dot(X, w) + b)
    delta = Y - T
    # Gradient-style update, averaged over the whole dataset.
    w += .01 * np.dot(delta, X) / nSamples
    b += .01 * np.mean(delta)

    # Refresh the figure: labelled points plus the current boundary.
    plt.clf()
    plt.scatter(X[:, 0], X[:, 1], c=Y, cmap='winter')
    plt.axis([-2, 4, -5, 3])
    plt.title('Iteration = ' + str(iteration) + ', Error = ' + str(np.mean(Y != T)))
    plt.axis('off')
    decision_boundary(w, b)