Example #1
def test5():
    print("\n\nTest 5 - Algorithm Tweaks (Bias & Variance)")
    print("Expected / Actual:")

    print("\nRegularized Linear Regression: ")
    X, y = ut.read_mat('mat/ex5data1.mat')
    X = ut.create_design(X)
    theta = np.array([1, 1])
    print("303.993 / ", alg.SSD(theta, X, y, 1))
    grad = alg.SSD_gradient(theta, X, y, 1)
    print("-15.30 / ", grad[0])
    print("598.250 / ", grad[1])

    print("\nLearning Curve:")
    raw = ut.read_mat_raw('mat/ex5data1.mat')
    X = raw['X']
    y = raw['y'].reshape(-1)

    Xval = raw['Xval']
    yval = raw['yval'].reshape(-1)
    print("Check plot")
    # pt.plot_learning_curve(ut.create_design(X), y, ut.create_design(Xval), yval, 0)

    print("\nFitting polynomial regression:")
    p = 8
    X_poly = ut.poly_features(X, p)
    X_poly, mu, sigma = ut.normalize_features(X_poly)
    X_poly = ut.create_design(X_poly)

    Xval = ut.poly_features(Xval, p)
    Xval -= mu
    Xval /= sigma
    Xval = ut.create_design(Xval)

    l = 0.01
    theta = alg.parametrize_linear(X_poly, y, l)

    print("Check plot, l =", l)
    pt.fit_plot(X, y, mu, sigma, theta, p)
    pt.plot_learning_curve(X_poly, y, Xval, yval, l)

    print("\nOptimize regularization:")
    print("Check plot")

    l = pt.plot_validation_curve(X_poly, y, Xval, yval)

    Xtest = raw['Xtest']
    ytest = raw['ytest'].reshape(-1)
    Xtest = ut.poly_features(Xtest, p)
    Xtest -= mu
    Xtest /= sigma
    Xtest = ut.create_design(Xtest)

    theta = alg.parametrize_linear(X_poly, y, l)
    print("3.8599 / ", alg.SSD(theta, Xtest, ytest, 0))

    print("\nRandomized learning curve:")
    print("Check plot")
    pt.plot_randomized_learning_curve(X_poly, y, Xval, yval, 0.01)
    return
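
The `alg` cost helpers called above are not shown on this page. Below is a minimal sketch of what `SSD` and `SSD_gradient` presumably compute, consistent with the expected values printed in the test; the exact signatures are assumptions, and the bias term is left unregularized as is conventional:

import numpy as np

def SSD(theta, X, y, l=0):
    # Regularized sum-of-squared-differences cost:
    # J(theta) = (1/2m) * sum((X @ theta - y)^2) + (l/2m) * sum(theta[1:]^2)
    m = y.shape[0]
    err = X @ theta - y
    return (err @ err + l * (theta[1:] @ theta[1:])) / (2 * m)

def SSD_gradient(theta, X, y, l=0):
    # Gradient of the cost above; theta[0] (the bias) is not regularized.
    m = y.shape[0]
    grad = X.T @ (X @ theta - y) / m
    grad[1:] += (l / m) * theta[1:]
    return grad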
Example #2
def plot_learning_curve(X, y, Xval, yval, l):
  train_err = np.zeros(y.shape)
  valid_err = np.zeros(y.shape)

  # Fit on the first i+1 training examples, then evaluate the
  # unregularized error on that subset and on the full validation set.
  for i in range(y.shape[0]):
    theta = alg.parametrize_linear(X[0:i+1, :], y[0:i+1], l)
    train_err[i] = alg.SSD(theta, X[0:i+1, :], y[0:i+1], 0)
    valid_err[i] = alg.SSD(theta, Xval, yval, 0)

  pt.plot(np.arange(1, train_err.shape[0]+1), train_err, label='Training')
  pt.plot(np.arange(1, valid_err.shape[0]+1), valid_err, label='Validation')
  pt.xlabel('# of Training Examples')
  pt.ylabel('Error')
  pt.legend()
  pt.show()

  return
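
`parametrize_linear` does the actual fitting in every example here, but it is not shown either. A plausible sketch, assuming it minimizes the regularized `SSD` cost from Example #1 with `scipy.optimize`; the solver choice and iteration cap are assumptions:

import numpy as np
import scipy.optimize as opt

def parametrize_linear(X, y, l):
    # Fit theta by minimizing the regularized SSD cost.
    theta0 = np.zeros(X.shape[1])
    res = opt.minimize(SSD, theta0, args=(X, y, l),
                       jac=SSD_gradient, method='CG',
                       options={'maxiter': 200})
    return res.x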
Example #3
def plot_randomized_learning_curve(X, y, Xval, yval, l, iters=50):
  train_err = np.zeros(y.shape)
  valid_err = np.zeros(y.shape)

  # For each training-set size, average the errors over `iters` random
  # draws. Training and validation examples are sampled independently:
  # reusing the training indices on the validation set pairs the wrong
  # examples and can go out of range when the validation set is smaller.
  for i in range(y.shape[0]):
    for j in range(iters):
      idx = np.random.randint(0, high=y.shape[0], size=i+1)
      vidx = np.random.randint(0, high=yval.shape[0], size=i+1)
      theta = alg.parametrize_linear(X[idx, :], y[idx], l)
      train_err[i] += alg.SSD(theta, X[idx, :], y[idx], 0)
      valid_err[i] += alg.SSD(theta, Xval[vidx, :], yval[vidx], 0)
    train_err[i] /= iters
    valid_err[i] /= iters

  pt.plot(np.arange(1, train_err.shape[0]+1), train_err, label='Training')
  pt.plot(np.arange(1, valid_err.shape[0]+1), valid_err, label='Validation')
  pt.xlabel('# of Training Examples')
  pt.ylabel('Error')
  pt.legend()
  pt.show()
  return
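
The `ut` feature helpers called in Examples #1 and #5 are also external. Minimal sketches consistent with how they are called; the names match the call sites, while `ddof=1` (mirroring MATLAB's `std`) is an assumption:

import numpy as np

def create_design(X):
    # Prepend a column of ones (the intercept feature).
    return np.hstack([np.ones((X.shape[0], 1)), X])

def poly_features(X, p):
    # Map a single feature column to [X, X^2, ..., X^p].
    X = X.reshape(-1, 1)
    return np.hstack([X ** (i + 1) for i in range(p)])

def normalize_features(X):
    # Z-score each column and return mu/sigma so validation and test
    # data can be scaled with the training set's statistics.
    mu = X.mean(axis=0)
    sigma = X.std(axis=0, ddof=1)
    return (X - mu) / sigma, mu, sigma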
Example #4
def plot_validation_curve(X, y, Xval, yval):
  l_vec = np.array([0, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 2, 2.5, 3, 3.3, 10])
  train_err = np.zeros(l_vec.shape)
  valid_err = np.zeros(l_vec.shape)

  print("Lambda\t\tTraining Error\tValidation Error")
  for i in range(l_vec.shape[0]):
    # Fit with regularization l_vec[i], but report unregularized errors.
    theta = alg.parametrize_linear(X, y, l_vec[i])
    train_err[i] = alg.SSD(theta, X, y, 0)
    valid_err[i] = alg.SSD(theta, Xval, yval, 0)
    print("%f\t%f\t%f" % (l_vec[i], train_err[i], valid_err[i]))

  pt.plot(l_vec, train_err, label='Training')
  pt.plot(l_vec, valid_err, label='Validation')
  pt.xlabel('Lambda')
  pt.ylabel('Error')
  pt.legend()
  pt.show()

  # Return the lambda with the lowest validation error.
  l = l_vec[np.argmin(valid_err)]
  return l
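
`fit_plot`, called in Example #1, is the remaining plotting helper. A sketch of what it likely does, assuming `pt` is `matplotlib.pyplot` inside the plotting module and reusing the feature helpers sketched above; the plotting range is an assumption:

import numpy as np
import matplotlib.pyplot as pt

def fit_plot(X, y, mu, sigma, theta, p):
    # Scatter the raw data and overlay the polynomial fit, applying the
    # same feature mapping and normalization used for training.
    xs = np.linspace(X.min() - 15, X.max() + 25, 100).reshape(-1, 1)
    Xp = (poly_features(xs, p) - mu) / sigma
    pt.scatter(X, y, marker='x', label='Data')
    pt.plot(xs.ravel(), create_design(Xp) @ theta, label='Fit')
    pt.xlabel('x')
    pt.ylabel('y')
    pt.legend()
    pt.show()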
Example #5
def test1():
    print("\n\nTest 1 - Linear Regression")
    print("Expected / Actual:")

    print("\nBatch gradient descent: ")
    X, y = ut.read_csv('csv/ex1data1.csv')
    X = ut.create_design(X)
    theta = np.zeros((X.shape[1], ))
    iterations = 1500
    alpha = 0.01
    print("32.0727 / ", alg.SSD(theta, X, y))
    print("52.2425 / ", alg.SSD(np.array([-1, 2]), X, y))
    alg.batch_gd(X, y, theta, alpha, iterations, alg.SSD_gradient)
    print("-3.630291 / ", theta[0])
    print("1.166362 / ", theta[1])
    print("34962.991574 / ",
          ut.predict(np.array([[6.1101]]), theta)[0] * 10**4)
    print("45342.450129 / ", ut.predict(np.array([[7]]), theta)[0] * 10**4)

    print("\nWith optimization: ")
    theta = np.zeros((X.shape[1], ))
    res = opt.minimize(alg.SSD,
                       theta, (X, y),
                       jac=alg.SSD_gradient,
                       method='Newton-CG',
                       options={"maxiter": 1500})
    theta = res.x
    print("-3.630291 / ", theta[0])
    print("1.166362 / ", theta[1])
    print("34962.991574 / ",
          ut.predict(np.array([[6.1101]]), theta)[0] * 10**4)
    print("45342.450129 / ", ut.predict(np.array([[7]]), theta)[0] * 10**4)

    print("\nNormalized batch gradient descent:")
    X, y = ut.read_csv('csv/ex1data2.csv')
    X, mu, sigma = ut.normalize_features(X)
    X = ut.create_design(X)
    alpha = 0.1
    iterations = 400
    theta = np.zeros((X.shape[1], ))
    alg.batch_gd(X, y, theta, alpha, iterations, alg.SSD_gradient)
    print("2000.680851 / ", mu[0])
    print("3.170213 / ", mu[1])
    print("794.7024 / ", sigma[0])
    print("0.7610 / ", sigma[1])
    print("340412.659574 / ", theta[0])
    print("110631.048958 / ", theta[1])
    print("-6649.472950 / ", theta[2])

    print("\nNormal equation:")
    X, y, = ut.read_csv('csv/ex1data2.csv')
    X = ut.create_design(X)
    alg.normal_eqn(X, y)
    print("340412.659574 / ", theta[0])
    print("110631.048958 / ", theta[1])
    print("-6649.472950 / ", theta[2])

    print("\nNormalized prediction:")
    print("293081.464622 / ",
          ut.predict(np.array([[1650, 3]]), theta, mu, sigma)[0])
    print("284343.447245 / ",
          ut.predict(np.array([[1650, 4]]), theta, mu, sigma)[0])

    return
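
Sketches of the remaining `alg`/`ut` helpers this test relies on, consistent with how they are called. Note that `batch_gd` must update `theta` in place, since the caller reads it back without an assignment; the pseudo-inverse in `normal_eqn` is an assumption:

import numpy as np

def batch_gd(X, y, theta, alpha, iterations, gradient):
    # Batch gradient descent; theta is updated in place.
    for _ in range(iterations):
        theta -= alpha * gradient(theta, X, y)

def normal_eqn(X, y):
    # Closed-form least squares: theta = (X^T X)^+ X^T y.
    return np.linalg.pinv(X.T @ X) @ X.T @ y

def predict(X, theta, mu=None, sigma=None):
    # Normalize with the training statistics (if given), add the
    # intercept column, and evaluate the linear model.
    if mu is not None:
        X = (X - mu) / sigma
    return create_design(X) @ theta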