def prediction():
    """Flask endpoint: predict the next closing price from an uploaded CSV + model.

    Expects two multipart uploads:
      * ``csv_file`` — OHLCV data with 'Date', 'Time', 'Volume', 'Low',
        'High' and 'Close' columns.
      * ``model``    — a pickled SupervisedDBNRegression model.

    Returns a JSON payload ``{'Prediction': <float>}``.
    """
    # Local imports so this fix does not depend on the (unseen) file header.
    import io
    import os
    import tempfile

    # BUG FIX: request.files[...] are FileStorage objects and .read() yields
    # raw bytes; pd.read_csv cannot parse a bytes object directly, so wrap it
    # in a BytesIO buffer.
    csv_bytes = request.files['csv_file'].read()
    model_bytes = request.files['model'].read()
    df = pd.read_csv(io.BytesIO(csv_bytes))

    # Build a proper DatetimeIndex from the separate Date/Time columns.
    df['DateTime'] = pd.to_datetime(df['Date'] + " " + df['Time'])
    df.index = df['DateTime']

    # Drop rows with no trading activity or a degenerate (Low == High) bar.
    df.drop(df[df['Volume'] == 0].index, axis=0, inplace=True)
    df.drop(df[df['Low'] == df['High']].index, axis=0, inplace=True)

    # Re-index by Date, matching the offline training pipeline.
    df['Date'] = pd.to_datetime(df['Date'])
    df.index = df['Date']

    data = df['Close'].copy()

    # Fit the scaler on the first 80% ("training" span) so inverse_transform
    # below maps model output back to price units.
    train_size = int(len(data) * 0.80)
    train = data[:train_size]

    # Live testing window: last 60 closes.
    test_size = 60
    test = data[len(df) - test_size:]

    scaler = MinMaxScaler()
    scaler.fit(np.array(train).reshape(-1, 1))

    # NOTE(review): as in the original, the *unscaled* closes are fed to the
    # model and only the output is inverse-transformed — confirm the model
    # was trained on raw prices.
    X_test = np.array(list(test))

    # BUG FIX: SupervisedDBNRegression.load expects a file path (see the
    # offline test/train pipelines), not raw upload bytes — spool the upload
    # to a temporary file first, and always clean it up.
    tmp = tempfile.NamedTemporaryFile(suffix='.pkl', delete=False)
    try:
        tmp.write(model_bytes)
        tmp.close()
        regressor = SupervisedDBNRegression.load(tmp.name)
    finally:
        os.unlink(tmp.name)

    y_pred = regressor.predict(X_test)
    y_pred = scaler.inverse_transform(y_pred.reshape(-1, 1))

    # Cast to a plain float: numpy scalars are not reliably JSON-serializable.
    return jsonify({'Prediction': float(y_pred[0][0])})
def build_RBM(num_bp, epoch_pretrain=125, batch_size=24, hidDim=None):
    """Construct an (untrained) SupervisedDBNRegression model.

    Parameters
    ----------
    num_bp : int
        Number of back-propagation fine-tuning iterations.
    epoch_pretrain : int
        RBM pre-training epochs (default 125).
    batch_size : int
        Mini-batch size (default 24).
    hidDim : list[int] | None
        Hidden layer sizes; defaults to ``[100, 140]``.

    Returns
    -------
    SupervisedDBNRegression
        Configured but not yet fitted regressor.
    """
    # BUG FIX: the original used a mutable list literal as the default
    # argument, which is shared across calls; use the None-sentinel idiom.
    if hidDim is None:
        hidDim = [100, 140]
    return SupervisedDBNRegression(hidden_layers_structure=hidDim,
                                   learning_rate_rbm=0.01,
                                   learning_rate=0.01,
                                   n_epochs_rbm=epoch_pretrain,
                                   n_iter_backprop=num_bp,
                                   batch_size=batch_size,
                                   activation_function='sigmoid')
def testing(csv_filepath, save_output_image, model_path):
    """Load a trained SupervisedDBNRegression model and print a one-step prediction.

    Parameters
    ----------
    csv_filepath : str
        OHLCV CSV with 'Date', 'Time', 'Volume', 'Low', 'High', 'Close'.
    save_output_image : bool
        Unused here; kept for interface compatibility with the plotting
        variant of this function.
    model_path : str
        Path to a pickled SupervisedDBNRegression model.
    """
    df = pd.read_csv(csv_filepath)

    # Combine Date + Time into a DatetimeIndex.
    dt = [df['Date'][i] + " " + df['Time'][i] for i in range(len(df))]
    df['DateTime'] = pd.to_datetime(dt)
    df.index = df['DateTime']

    # Drop zero-volume rows and degenerate bars where Low == High.
    df.drop(df[df['Volume'] == 0].index, axis=0, inplace=True)
    df.drop(df[df['Low'] == df['High']].index, axis=0, inplace=True)

    df['Date'] = pd.to_datetime(df['Date'])
    df.index = df['Date']

    data = df['Close'].copy()

    # Live testing window: last 60 closes fed to the model unscaled.
    # (The original also fitted a MinMaxScaler here whose output was never
    # used — that dead code has been removed.)
    test_size = 60
    test = data[len(df) - test_size:]
    X_test = np.array(list(test))

    # BUG FIX: the original used a bare `except:` and then fell through to
    # use `regressor`, raising NameError when loading failed.  Catch narrowly
    # and bail out instead.
    try:
        regressor = SupervisedDBNRegression.load(model_path)
    except Exception:
        print('Unable to load model')
        return

    y_pred = regressor.predict(X_test)
    print('Predicted value is {}'.format(y_pred[0][0]))
# NOTE(review): this chunk is truncated at both ends — it begins inside a
# call (presumably plt.savefig(...)) and ends inside a pd.DataFrame literal.
# Formatting reconstructed; code tokens unchanged.
              '/grafico_' + os.path.splitext(conjTreino)[0])
plt.close()

# Assemble the path of the pickled model to evaluate.
path1 = './Modelos/'
# path2 = 'Treinamento_2019_10_11_14_40_41/'
# path3 = 'modelo_treinamento_2019_10_11_14_40_41_modelo_101_[90, 90, 90].pkl'
path2 = 'Treinamento_2019_11_01_14_10_48/'
path3 = 'modelo_treinamento_2019_11_01_14_10_48_modelo_1_[90, 90, 90].pkl'
pathCompleto = path1 + path2 + path3
regressor = SupervisedDBNRegression.load(pathCompleto)

# Test  (translated from Portuguese "Teste")
# NOTE(review): X_test / Y_test are defined earlier in the original file —
# not visible in this chunk.
Y_pred = regressor.predict(X_test)
# if conjTreino == 'degrauUnitario.csv':
#     Y_pred = Y_pred / 4.6  # 4.62073146825719
r2Score = r2_score(Y_test, Y_pred)
MSE = mean_squared_error(Y_test, Y_pred)
print('\nDone.\nR-squared: %f\nMSE: %f' % (r2Score, MSE))

# Collect per-dataset results (DataFrame literal continues past this chunk).
arquivoResultados = pd.DataFrame(data={
    "Arquivo": [conjTreino],
# scaler = MinMaxScaler() # scaled_train = scaler.fit_transform(np.array(train).reshape(-1,1)) # log_change_train = np.log(train).pct_change().dropna() # log_change_test = np.log(test).pct_change().dropna() # print(log_change_train) # Preparing training data scaled_train = np.array(train) X_train, y_train = [], [] for i in range(60, len(scaled_train)): X_train.append(scaled_train[i - 60:i]) y_train.append(scaled_train[i]) # Training X_train = np.array(X_train) y_train = np.array(y_train) regressor = SupervisedDBNRegression(hidden_layers_structure=[100], learning_rate_rbm=0.001, learning_rate=0.001, n_epochs_rbm=60, n_iter_backprop=100, batch_size=32, activation_function='relu') regressor.fit(X_train, y_train) print('*' * 15, ' Model is saving ', '*' * 15) regressor.save('./models/nifty_24_03_2021.pkl') print('*' * 15, ' Model saved ', '*' * 15)
def testing(csv_filepath, save_output_image, model_path):
    """Evaluate a saved SupervisedDBNRegression model on the last 20% of the data.

    Builds 60-step sliding windows over the scaled test split, prints
    R-squared / MSE / "accuracy", and plots actual vs. predicted closes,
    optionally saving the figure to ``./assets/output.png``.

    Parameters
    ----------
    csv_filepath : str
        OHLCV CSV with 'Date', 'Time', 'Volume', 'Low', 'High', 'Close'.
    save_output_image : bool
        When truthy, save the plot to ./assets/output.png before showing it.
    model_path : str
        Path to a pickled SupervisedDBNRegression model.
    """
    df = pd.read_csv(csv_filepath)

    # Combine Date + Time into a DatetimeIndex.
    dt = [df['Date'][i] + " " + df['Time'][i] for i in range(len(df))]
    df['DateTime'] = pd.to_datetime(dt)
    df.index = df['DateTime']

    # Drop zero-volume rows and degenerate bars where Low == High.
    df.drop(df[df['Volume'] == 0].index, axis=0, inplace=True)
    df.drop(df[df['Low'] == df['High']].index, axis=0, inplace=True)

    df['Date'] = pd.to_datetime(df['Date'])
    df.index = df['Date']

    data = df['Close'].copy()

    # 80/20 chronological split; the scaler is fitted on the train span only.
    train_size = int(len(data) * 0.80)
    train = data[:train_size]
    test = data[train_size:]

    scaler = MinMaxScaler()
    scaler.fit_transform(np.array(train).reshape(-1, 1))
    scaled_test = scaler.transform(np.array(test).reshape(-1, 1))

    # 60-step look-back windows: X[i] = 60 previous scaled closes, y[i] = next.
    X_test, y_test = [], []
    for i in range(60, len(scaled_test)):
        X_test.append(scaled_test[i - 60:i, 0])
        y_test.append(scaled_test[i, 0])
    X_test = np.array(X_test)
    y_test = np.array(y_test)
    print(X_test.shape)

    # BUG FIX: the original used a bare `except:` and then continued, so a
    # failed load surfaced later as a NameError on `regressor`.  Catch
    # narrowly and bail out instead.
    try:
        regressor = SupervisedDBNRegression.load(model_path)
        print('Model loaded Successfully ')
    except Exception:
        print('Unable to load model')
        return

    try:
        y_pred = regressor.predict(X_test)
        print('Metrices of Regressor ')
        print('\nR-squared: %f\nMSE: %f' % (r2_score(y_test, y_pred), mean_squared_error(y_test, y_pred)))
        print('Accuracy = {}'.format(r2_score(y_test, y_pred) * 100))

        # Map scaled values back to price units for plotting.
        y_test = scaler.inverse_transform(y_test.reshape(-1, 1))
        y_pred = scaler.inverse_transform(y_pred.reshape(-1, 1))

        plt.figure(figsize=(20, 9))
        # plt.plot(train.index , train , label = 'Training data')
        plt.plot(test.index[60:], y_test, label='Test', color='b')
        plt.plot(test.index[60:], y_pred, label='Regressor Predictions', color='g')
        plt.title('Actual v/s Predicted')
        plt.legend()
        if save_output_image:
            plt.savefig('./assets/output.png')
        plt.show()
    except Exception as e:
        # Best-effort reporting: preserve the original behaviour of printing
        # the failure instead of propagating it.
        print(e)
# print(log_change_train) # Test # scaled_test = scaler.transform(np.array(test).reshape(-1,1)) X_test, y_test = [], [] for i in range(60, len(scaled_test)): X_test.append(scaled_test[i - 60:i, 0]) y_test.append(scaled_test[i, 0]) X_test = np.array(X_test) y_test = np.array(y_test) # print(X_test.shape) try: FILE_PATH = '/home/rahul/Desktop/dbn/nifty_20_03_2021.pkl' regressor = SupervisedDBNRegression.load(FILE_PATH) print('Model loaded Successfully ') except: print('Unable to load model') try: y_pred = regressor.predict(X_test) print('\nR-squared: %f\nMSE: %f' % (r2_score(y_test, y_pred), mean_squared_error(y_test, y_pred))) try: y_test = scaler.inverse_transform(y_test.reshape(-1, 1)) y_pred = scaler.inverse_transform(y_pred.reshape(-1, 1)) # y_test = return_log_normal(y_test) # y_pred = return_log_normal(y_pred) plt.figure(figsize=(20, 9))
# NOTE(review): chunk starts mid-function — these first lines are the tail
# of create_sliding_windows(data, lag), whose header is outside this view.
# Indentation reconstructed (appends assumed inside the window loop) — TODO
# confirm against the full source.
        X.append(cek)
        y.append(data[i + lag])
    # Return windows and aligned targets as numpy arrays.
    return np.array(X), np.array(y)

# Build lag-3 windows for train / validation / test splits.
# NOTE(review): train, validation and test are defined earlier in the file.
lag = 3
X_train, y_train = create_sliding_windows(train, lag)
y_train = np.reshape(y_train, (len(y_train), 1))
X_val, y_val = create_sliding_windows(validation, lag)
X_test, y_test = create_sliding_windows(test, lag)

# Training
regressor = SupervisedDBNRegression(hidden_layers_structure=[30, 40],
                                    learning_rate_rbm=0.001,
                                    learning_rate=0.001,
                                    n_epochs_rbm=30,
                                    n_iter_backprop=100,
                                    batch_size=32,
                                    activation_function='relu',
                                    dropout_p=0.2)
regressor.fit(X_train, y_train)

# Save the model
regressor.save('model.pkl')

# Restore it (round-trips the pickle to exercise save/load)
regressor = SupervisedDBNRegression.load('model.pkl')

# Test
y_pred = regressor.predict(X_test)
print('Done.\nMAE: %f' % mean_absolute_error(y_test, y_pred))
Y_train = np.array(train_data[0:316, [30]]) # 提取测试集特征列 # test_x = np.array(test_data[0:7, [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29]]) X_test = np.array(test_data[0:1000, [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29 ]]) #Y_test = np.array(train_data[0:6, [30]]) # Data scaling min_max_scaler = MinMaxScaler() X_train = min_max_scaler.fit_transform(X_train) # Training regressor = SupervisedDBNRegression(hidden_layers_structure=[130], learning_rate_rbm=0.05, learning_rate=0.05, n_epochs_rbm=20, n_iter_backprop=20, batch_size=16, activation_function='sigmoid') regressor.fit(X_train, Y_train) # Test X_test = min_max_scaler.transform(X_test) Y_pred = regressor.predict(X_test) print("测试集预测结果") print(Y_pred) #print('Done.\nR-squared: %f\nMSE: %f' % (r2_score(Y_test, Y_pred), mean_squared_error(Y_test, Y_pred)))
# Example: DBN regression on the Boston housing dataset.
# NOTE(review): load_boston was removed from scikit-learn 1.2+; this snippet
# requires an older scikit-learn — confirm the pinned version.

# Loading dataset
boston = load_boston()
X, Y = boston.data, boston.target

# Splitting data (fixed seed for reproducibility)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=0)

# Data scaling: fit on the training split only; reuse the scaler for test.
min_max_scaler = MinMaxScaler()
X_train = min_max_scaler.fit_transform(X_train)

# Training
regressor = SupervisedDBNRegression(hidden_layers_structure=[200],
                                    learning_rate_rbm=0.01,
                                    learning_rate=0.01,
                                    n_epochs_rbm=100,
                                    n_iter_backprop=500,
                                    l2_regularization=0.0,
                                    batch_size=32,
                                    activation_function='relu')
regressor.fit(X_train, Y_train)

# Test
X_test = min_max_scaler.transform(X_test)
Y_pred = regressor.predict(X_test)
# BUG FIX: this was a Python 2 `print` statement, a syntax error under
# Python 3 and inconsistent with the print() calls elsewhere in the file.
print('Done.\nR-squared: %f\nMSE: %f' % (r2_score(Y_test, Y_pred),
                                         mean_squared_error(Y_test, Y_pred)))