def run_testsuite(train_X, train_Y):
    """Run every training method on the given training set.

    Prints the training-set shapes, runs methods 1-5 (method 6 is kept
    disabled), then trains method 7 and visualizes its fitted functions.
    """
    print("\nSize of training set X: {}".format(train_X.shape))
    print("Size of training set Y: {}".format(train_Y.shape))

    # Methods 1-5 all follow the same banner-then-train pattern.
    numbered_methods = (
        (1, train_method1),
        (2, train_method2),
        (3, train_method3),
        (4, train_method4),
        (5, train_method5),
    )
    for index, method in numbered_methods:
        print("\n+++++Show method {}++++".format(index))
        method(train_X, train_Y)

    print("\n+++++Show method 6++++")
    #train_method6(train_X, train_Y)
    # uncomment this if you want to show contour of error graph
    #plot_surface_error(train_X, train_Y)

    print("\n+++++Show method 7++++")
    FX_List = train_method7(train_X, train_Y)
    am.visualize(train_X, train_Y, FX_List)
def test_polynomial(X, train_Y ,title, xlabel, ylabel, degree=2):
    """Standardize the data, expand X into polynomial features, and run
    prediction examples 1-5, visualizing the results.

    Parameters
    ----------
    X : 2-D array-like, shape (n_samples, 1)
        Raw (unscaled) input feature. NOTE(review): the shape assertion
        below only holds when X has a single column — confirm callers
        always pass one feature.
    train_Y : 2-D array-like, shape (n_samples, 1)
        Raw (unscaled) target values.
    title, xlabel, ylabel : str
        Labels forwarded to the plotting helpers.
    degree : int, optional
        Degree of the polynomial expansion (default 2). Previously a
        hard-coded local; now a parameter so callers can pick 3, 4, 5, ...
    """
    # Standardize input and output to zero mean / unit variance.
    scaler_X = preprocessing.StandardScaler().fit(X)
    scaler_Y = preprocessing.StandardScaler().fit(train_Y)
    X__ = scaler_X.transform(X)
    train_Y = scaler_Y.transform(train_Y)

    # Polynomial features without the bias column:
    #   degree 2 -> [x, x^2], degree 3 -> [x, x^2, x^3]
    # include_bias=False replaces the old manual np.delete of column 0.
    poly = PolynomialFeatures(degree, include_bias=False)
    train_X = poly.fit_transform(X__)

    # Sanity checks (note: stripped when running under `python -O`).
    assert train_X.shape == (len(train_X), degree)  # (n_samples, degree)
    assert train_Y.shape == (len(train_Y), 1)       # (n_samples, 1)

    print("\n+++++ Example 1++++")
    predict = predict_example1(train_X, train_Y)
    # Plot in the original (unscaled) Y units.
    show_graph(X, scaler_Y.inverse_transform(train_Y),
               scaler_Y.inverse_transform(predict), title, xlabel, ylabel)

    print("\n+++++ Example 2++++")
    predict = predict_example2(train_X, train_Y)

    print("\n+++++ Example 3++++")
    predict = predict_example3(train_X, train_Y)

    print("\n+++++ Example 4++++")
    predict = predict_example4(train_X, train_Y)

    print("\n+++++ Example 5++++")
    predictList, accuracyList, lossList = predict_example5(train_X, train_Y)
    am.visualize(X, scaler_Y.inverse_transform(train_Y),
                 scaler_Y.inverse_transform(predictList),
                 accuracyList, lossList, title=title)
def test_one_input(X, train_Y ,title, xlabel, ylabel):
    """Standardize a single-feature training set and run prediction
    examples 1-7, visualizing examples 1 and 5."""
    # Fit scalers on the raw data, then transform to zero mean / unit variance.
    scaler_X = preprocessing.StandardScaler().fit(X)
    scaler_Y = preprocessing.StandardScaler().fit(train_Y)
    train_X = scaler_X.transform(X)
    train_Y = scaler_Y.transform(train_Y)

    # Single input feature: X and Y must have matching (n_samples, 1) shapes.
    assert train_X.shape == train_Y.shape

    print("\n+++++ Example 1++++")
    predict = predict_example1(train_X, train_Y)
    # Plot against the original X with Y mapped back to its raw scale.
    show_graph(X, scaler_Y.inverse_transform(train_Y),
               scaler_Y.inverse_transform(predict), title, xlabel, ylabel)

    # Examples 2-4 share the banner-then-predict pattern.
    for number, example in ((2, predict_example2),
                            (3, predict_example3),
                            (4, predict_example4)):
        print("\n+++++ Example {}++++".format(number))
        predict = example(train_X, train_Y)

    print("\n+++++ Example 5++++")
    predictList, accuracyList, lossList = predict_example5(train_X, train_Y)
    am.visualize(X, scaler_Y.inverse_transform(train_Y),
                 scaler_Y.inverse_transform(predictList),
                 accuracyList, lossList, title=title)
    plot_surface_error(train_X, train_Y)

    print("\n+++++ Example 6++++")
    predict = predict_example6(train_X, train_Y)

    print("\n+++++ Example 7++++")
    predict = predict_example7(train_X, train_Y)
# --- Top-level driver: normalize features, train, and plot the decision boundary ---

# Normalize X1/X2 by their mean so the optimizer works on comparable scales.
dfNorm = df.copy()
dfNorm[x_column_names] = dfNorm[x_column_names] / X1X2_mean

train_X1X2 = dfNorm[x_column_names]  # X1, X2 training set
# BUG FIX: pandas Series has no .reshape (removed in pandas 0.25) —
# convert to a NumPy array first, then reshape into a column vector.
train_Y = dfNorm[y_column_name].to_numpy().reshape(-1, 1)  # Y (Output) training set

C_List = train_method(train_X1X2, train_Y)

classA, classB = seperateClass(dfNorm)
X1A, X2A = splitFeature(classA)
X1B, X2B = splitFeature(classB)

# margin = w0 + w1*X1 + w2*X2
# margin/w2 = w0/-w2 + (w1/-w2)*X1 - X2
# Setting 0 = w0/w2 + (w1/w2)*X1 - X2 gives the straight-line equation that predicts X2.
# w0/-w2 + (w1/-w2)*X1 -> the decision boundary Equation
X1_norm = dfNorm['X1']
X2_norm = dfNorm['X2']
FX_List = getDecisionFunc(X1_norm, C_List)
am.visualize(X1A, X2A, X1B, X2B, X1_norm, X2_norm, FX_List)

#plot2Class(classA, classB, X1_norm, FX_List[len(FX_List)-1]) # normalize classA and classB
#w0, w1, w2 = C_List[len(C_List)-1]
#plot2Class(classA, classB, X1, FX[len(FX)-1]) # normalize classA and classB

# for visualization
#plt.plot(data_X, Y, 'bs', data_X, fx_final, 'r-')
#plt.show()