def generateLearningCurve(X, y, degree, regLambda):
    """Compute and plot a learning curve via leave-one-out cross-validation.

    For each LOO split, ``learningCurve`` is expected to return per-trial
    train/test error vectors of length ``len(X) - 1``; the per-trial errors
    are averaged across all splits and handed to ``plotLearningCurve``.

    Parameters
    ----------
    X, y : array-like
        Feature matrix and targets, indexable by integer index arrays.
    degree : int
        Polynomial degree forwarded to ``learningCurve``.
    regLambda : float
        Regularization strength forwarded to ``learningCurve``.
    """
    n = len(X)
    # One row of errors per LOO trial; each trial yields n - 1 error values.
    train_errors = np.zeros((n, n - 1))
    test_errors = np.zeros((n, n - 1))

    for trial, (train_idx, test_idx) in enumerate(LeaveOneOut().split(X)):
        err_train, err_test = learningCurve(
            X[train_idx], y[train_idx],
            X[test_idx], y[test_idx],
            regLambda, degree,
        )
        train_errors[trial, :] = err_train
        test_errors[trial, :] = err_test

    # Average over all n trials, then plot the resulting curves.
    plotLearningCurve(
        train_errors.mean(axis=0), test_errors.mean(axis=0), regLambda, degree
    )
def generateLearningCurve(X, y, degree, regLambda):
    """Compute and plot a learning curve via leave-one-out cross-validation.

    For each LOO split, ``learningCurve`` is expected to return per-trial
    train/test error vectors of length ``len(X) - 1``; these are averaged
    across all splits and passed to ``plotLearningCurve``.

    Parameters
    ----------
    X, y : array-like
        Feature matrix and targets, indexable by integer index arrays.
    degree : int
        Polynomial degree forwarded to ``learningCurve``.
    regLambda : float
        Regularization strength forwarded to ``learningCurve``.

    NOTE(review): a function with this exact name is defined earlier in the
    file; at import time the later definition shadows the earlier one —
    confirm which copy is intended and delete the other.
    """
    n = len(X)
    errorTrains = np.zeros((n, n - 1))
    errorTests = np.zeros((n, n - 1))

    # FIX: sklearn.cross_validation.LeaveOneOut(n) was deprecated in
    # scikit-learn 0.18 and removed in 0.20. The replacement is
    # sklearn.model_selection.LeaveOneOut, which takes no constructor
    # argument and yields (train, test) index arrays from .split(X).
    loo = LeaveOneOut()
    for itrial, (train_index, test_index) in enumerate(loo.split(X)):
        X_train, X_test = X[train_index], X[test_index]
        y_train, y_test = y[train_index], y[test_index]
        errTrain, errTest = learningCurve(
            X_train, y_train, X_test, y_test, regLambda, degree
        )
        errorTrains[itrial, :] = errTrain
        errorTests[itrial, :] = errTest

    # Average the per-trial error curves over all n LOO trials.
    errorTrain = errorTrains.mean(axis=0)
    errorTest = errorTests.mean(axis=0)
    plotLearningCurve(errorTrain, errorTest, regLambda, degree)