def save_nn(nn_obj, path):
    """Save a NN object to a file.

    nn_obj: NN object (pyrenn dict or Net_tr)
    path: str, path to the saved file
    """
    if isinstance(nn_obj, dict):
        prn.saveNN(nn_obj, path)
    elif isinstance(nn_obj, Net_tr):
        tr.save(nn_obj, path)
    else:
        print("canceled")
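# A loading counterpart to save_nn, given as a minimal sketch: it assumes prn is
# pyrenn (prn.loadNN reads a network saved with prn.saveNN) and tr is torch
# (tr.load restores a serialized Net_tr object). The backend argument is
# hypothetical and not part of the original code.
def load_nn(path, backend='pyrenn'):
    """Load a NN object previously written by save_nn."""
    if backend == 'pyrenn':
        return prn.loadNN(path)
    elif backend == 'torch':
        return tr.load(path)
    print("canceled")
    return None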
def save_to_file(self, filename='narxNet'):
    pr.saveNN(self.net, filename=filename)
###
# Create and train NN

# Create a neural network with 3 inputs, 2 hidden layers with
# 5 neurons each and 2 outputs.
# The NN has a recurrent connection with a delay of 1 timestep from the output
# to the first layer.
net = prn.CreateNN([3, 5, 5, 2], dIn=[0], dIntern=[], dOut=[1])

# Train the NN with training data P=input and Y=target.
# Set the maximum number of iterations k_max to 500.
# Set the termination condition for the error E_stop to 1e-5.
# Training stops after 500 iterations or when the error <= E_stop.
net = prn.train_LM(P, Y, net, verbose=True, k_max=500, E_stop=1e-5)

###
# Save the trained NN to file
prn.saveNN(net, "D:/School/Masterproef/Python/pyrenn/SavedNN/compair.csv")
print("save succeeded")

###
# Calculate outputs of the trained NN for train and test data
y = prn.NNOut(P, net)
ytest = prn.NNOut(Ptest, net)

time_stop[0] = time.time()
cpu_percent[0] = psutil.cpu_percent()
virtual_mem[0] = psutil.virtual_memory()

# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# example_friction.py
time_start[1] = time.time()
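# A quick fit check for the compair outputs computed above (a sketch, assuming
# numpy is imported as np and that Y and Ytest hold the train and test targets
# with the same shapes as y and ytest):
mse_train = np.mean((np.asarray(y) - np.asarray(Y)) ** 2)
mse_test = np.mean((np.asarray(ytest) - np.asarray(Ytest)) ** 2)
print("train MSE:", mse_train, "test MSE:", mse_test)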
def Tranning_by_Neural_Network():
    # ------------------------------ Read file and create DataFrame ---------------------------------
    # filepath = 'C:\\Users\\richard.weng\\Documents\\Python Scripts\\python_projects\\(1) NIVG Project\\ANN\\'
    file_data = file_name.get() + '.csv'
    df0 = pd.read_csv(file_data)
    # Select the test subject
    # df = df[df.Name=='Nick']
    df = df0.iloc[:, 1:]  # remove the first column (tester name)
    print(df.T.tail())
    print('--------------------------------------------')
    print('Length of df:', len(df))
    print('--------------------------------------------')
    P = df.T.iloc[1:features_Num.get() + 1, 0:len(df)]
    print(P.tail())
    print('Input shape:', P.shape)
    print('--------------------------------------------')
    Y = df.T.iloc[0:1, 0:len(df)]
    print(Y.tail())
    print('Output shape:', Y.shape)
    print('--------------------------------------------')

    # Convert to 2D arrays
    P = np.array(P)
    Y = np.array(Y)

    # Split into 70% training and 30% validation data
    x_train, x_test, y_train, y_test = train_test_split(P.T, Y.T, test_size=0.3, random_state=None)
    x_of_train = (x_train / np.amax(x_train, axis=0)).T
    x_of_test = (x_test / np.amax(x_train, axis=0)).T
    y_of_train = y_train.T / 600
    y_of_test = y_test.T / 600
    # ------------------------------ Read file and create DataFrame ------------------------------------

    # ---------------------------- ANN main program ---------------------------------------------
    # Create the NN: features_Num inputs, two hidden layers, 1 output
    net = prn.CreateNN([
        features_Num.get(),
        hiddenlayer1_features.get(),
        hiddenlayer2_features.get(),
        1
    ])

    # Train the NN with Levenberg-Marquardt
    net = prn.train_LM(x_of_train,
                       y_of_train,
                       net,
                       verbose=True,
                       k_max=iteration.get(),
                       E_stop=1e-10)

    # Compute the NN outputs
    y_prn_train = prn.NNOut(x_of_train, net)
    y_prn_test = prn.NNOut(x_of_test, net)
    # print('Predicted Y for the x train data:', '\n', y_prn_train * 600)
    # print('Predicted Y for the x test data:', '\n', y_prn_test * 600)
    # ---------------------------- ANN main program ---------------------------------------------

    # ---------------------------- Check the results ------------------------------------------
    # Visualize the results
    plt.scatter(y_of_train * 600, y_prn_train * 600)
    plt.scatter(y_of_test * 600, y_prn_test * 600)
    plt.title('ANN Simulation Result')
    plt.xlabel('Input glucose (mg/dL)')
    plt.ylabel('Predicted glucose (mg/dL)')
    plt.grid()
    plt.show()
    print('Actual glucose values of the test set:', '\n', y_of_test * 600)
    print('Predicted glucose values of the test set:', '\n', y_prn_test * 600)
    # ---------------------------- Check the results ------------------------------------------

    # Save ANN
    prn.saveNN(net, file_name.get() + '_LM_parameter' + '.csv')

    # Check the final correlation
    y_all = prn.NNOut((P.T / np.amax(x_train, axis=0)).T, net) * 600
    plt.scatter(Y.flatten(), y_all)
    Name = df0['Name'].values.tolist()
    df_result = pd.DataFrame({
        'Name': Name,
        'total_y': Y.flatten(),
        'total_pre_y': y_all
    })
    print('Correlation analysis:\n', df_result.corr())
    # Print how many samples there are
    print('Total number of samples:', len(df_result))

    # Save the new result into a new CSV file
    df_result.to_csv(file_name.get() + '_LM_result' + '.csv')
# Create a recurrent neural network with 22 inputs, 1 hidden layer with
# 11 neurons and 1 output.
# The NN has a recurrent connection with a delay of 1 timestep from the output
# to the first layer.
net = prn.CreateNN([22, 11, 1], dIn=[0], dIntern=[], dOut=[1])

# Train the NN with training data P=input and Y=target.
# Set the maximum number of iterations k_max to 100.
# Set the termination condition for the error E_stop to 1e-3.
# Training stops after 100 iterations or when the error <= E_stop.
net = prn.train_LM(P, Y, net, verbose=True, k_max=100, E_stop=1e-3)

prn.saveNN(net, "RNNmodel_st.mdl")
print("saved")
os._exit(0)

# print("loading")
# net = prn.loadNN("RNNmodel.mdl")
# print("loaded")

###
# Calculate outputs of the trained NN for the train data
# (note: unreachable after os._exit(0) above)
y = prn.NNOut(P, net)

# for i, o in zip(P, Y):
#     output = winner_net.activate(i)
#     print("input {!r}, expected output {!r}, got {!r}".format(i, o, output))
#     print("expected output {!r}, got {!r}".format(o, output))
# print(y)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pyrenn as prn

datosIn = pd.read_csv("pacientestrain.csv", sep=',')
datosOut = pd.read_csv("pacientestarg.csv", sep=',')

P = np.array(datosIn)
Y = np.array(datosOut)

inputs = 7
outputs = 1

net = prn.CreateNN([inputs, 10, 14, outputs], dIn=[0], dIntern=[100], dOut=[10, 20])
net = prn.train_LM(P, Y, net, verbose=True, k_max=20000, E_stop=1e-10)

y = prn.NNOut(P, net)
ytest = prn.NNOut(P, net)
ytest = np.array(ytest)
print(ytest)

filename = "nnBP.csv"
prn.saveNN(net, filename)
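# matplotlib is imported above but never used; a minimal visual check of the
# fit could plot the network output against the targets (a sketch, flattening
# both arrays so their orientation does not matter):
plt.plot(np.ravel(Y), label='target')
plt.plot(np.ravel(y), label='NN output')
plt.legend()
plt.show()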
Ytest = np.array([df[9], df[10]])

psutil.cpu_percent(interval=None, percpu=True)
time_start.append(time.time())

# Create and train NN
net = prn.CreateNN([3, 5, 5, 2], dIn=[0], dIntern=[], dOut=[1])
net = prn.train_LM(P, Y, net, verbose=True, k_max=500, E_stop=1e-5)

time_stop.append(time.time())
cores.append(psutil.cpu_percent(interval=None, percpu=True))
virtual_mem.append(psutil.virtual_memory())

###
# Save the trained NN to file
prn.saveNN(net, "D:/School/Masterproef/Python/pyrenn/SavedNN/compair.csv")

# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# example_friction.py
for i in range(iterations):
    time_start.append(time.time())

    # Read example data
    df = genfromtxt('example_data_friction.csv', delimiter=',')
    P = df[1]
    Y = df[2]
    Ptest = df[3]
    Ytest = df[4]

    psutil.cpu_percent(interval=None, percpu=True)
sourceframes *= pysptk.blackman(frameLength)  # windowing
sourcemcepvectors = np.apply_along_axis(
    pysptk.mcep, 1, sourceframes, order, alpha)  # extract MCEPs of the source frames

sr, tx = wavfile.read(targetfile)
targetframes = librosa.util.frame(
    tx,
    frame_length=frameLength,  # framing the target audio
    hop_length=hop_length).astype(np.float64).T
targetframes *= pysptk.blackman(frameLength)  # windowing
targetmcepvectors = np.apply_along_axis(
    pysptk.mcep, 1, targetframes, order, alpha)  # extract MCEPs of the target frames

# Trim both sets to a common length and transpose so each frame is one column,
# the layout pyrenn expects.
norm = min(len(sourcemcepvectors), len(targetmcepvectors))
transsourcemcepvectorsmod = np.transpose(sourcemcepvectors[0:norm])
transtargetmcepvectorsmod = np.transpose(targetmcepvectors[0:norm])

# Training model.
net = pyrenn.CreateNN([order + 1, order + 5, order + 5, order + 1])
net = pyrenn.train_LM(transsourcemcepvectorsmod,
                      transtargetmcepvectorsmod,
                      net,
                      k_max=100,
                      verbose=True,
                      E_stop=5)

# Saving model.
pyrenn.saveNN(net, 'pyrennweights_2.csv')
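# How the saved mapping might be applied later (a sketch): pyrenn.loadNN reads
# the file written above; new_source_mcep is a hypothetical (order + 1, n_frames)
# array of MCEPs extracted and transposed exactly like the training frames.
net = pyrenn.loadNN('pyrennweights_2.csv')
converted_mcep = pyrenn.NNOut(new_source_mcep, net)  # converted MCEPs, one frame per column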
Ytest = np.array([df[9], df[10]])

psutil.cpu_percent(interval=None, percpu=True)
time_start.append(time.time())

# Create and train NN
net = prn.CreateNN([3, 5, 5, 2], dIn=[0], dIntern=[], dOut=[1])
net = prn.train_LM(P, Y, net, verbose=True, k_max=500, E_stop=1e-5)

time_stop.append(time.time())
cores.append(psutil.cpu_percent(interval=None, percpu=True))
virtual_mem.append(psutil.virtual_memory())

###
# Save the trained NN to file
prn.saveNN(net, "./SavedNN/compair.csv")

# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# example_friction.py
for i in range(iterations):
    time_start.append(time.time())

    # Read example data
    df = genfromtxt('example_data_friction.csv', delimiter=',')
    P = df[1]
    Y = df[2]
    Ptest = df[3]
    Ytest = df[4]

    psutil.cpu_percent(interval=None, percpu=True)
            label='Train sets (70% of data)')
plt.scatter(y_of_test * 600,
            y_prn_test * 600,
            label='Verify sets (30% of data)')
plt.title('ANN Simulation Result')
plt.xlabel('Input glucose (mg/dL)')
plt.ylabel('Predicted glucose (mg/dL)')
plt.legend()
plt.grid()
plt.show()
print('Actual glucose values of the test set:', '\n', y_of_test * 600)
print('Predicted glucose values of the test set:', '\n', y_prn_test * 600)
# ---------------------------- Check the results ------------------------------------------

# Save ANN
prn.saveNN(net, file_name + '_LM_parameter' + '.csv')
# ---------------------------- Check the results ------------------------------------------

# Check the final correlation
y_all = prn.NNOut((P.T / np.amax(x_train, axis=0)).T, net) * 600
plt.scatter(Y.flatten(), y_all)
Name = df0['Name'].values.tolist()
df_result = pd.DataFrame({
    'Name': Name,
    'total_y': Y.flatten(),
    'total_pre_y': y_all
})
print('Correlation analysis:\n', df_result.corr())
# Print how many samples there are
print('Total number of samples:', len(df_result))
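# A single-number summary of the correlation scatter above (a sketch, assuming
# numpy is imported as np): the Pearson correlation between measured and
# predicted glucose.
r = np.corrcoef(Y.flatten(), y_all)[0, 1]
print('Pearson r between measured and predicted glucose:', r)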
#     print(y[i][0])
# z = y
# np.around(y, 0, z)
# for i in range(len(z)):
#     print(z[i][0])
#     if z[i][0] == 1:
#         print(diseases[i])
# for i in range(len(disease_map)):  # not sure
#     print(disease_map)
# ytest = prn.NNOut(Ptest, net)
# prn.NNOut(P, net[, P0=None, Y0=None])
prn.saveNN(net, '/usr/local/lib/python2.7/site-packages/examples/minor_final.csv')

###
# Plot results
# fig = plt.figure(figsize=(15, 10))
# ax0 = fig.add_subplot(221)
# ax1 = fig.add_subplot(222, sharey=ax0)
# ax2 = fig.add_subplot(223)
# ax3 = fig.add_subplot(224, sharey=ax2)
# fs = 18
# t = np.arange(0, 480.0) / 4  # 480 timesteps in 15 minute resolution

# Train Data
# ax0.set_title('Train Data', fontsize=fs)
# ax0.plot(y[0], color='b', lw=2, label='NN Output')
# ax0.plot(Y[0], color='r', marker='None', linestyle=':', lw=3, markersize=8, label='Data')
# ax0.tick_params(labelsize=fs-2)