# --- Stateful LSTM model ----------------------------------------------------
# NOTE(review): this chunk arrived with its newlines stripped, so everything
# after the first '#' had collapsed into one dead comment and the model was
# never built.  The original statements are restored below.  This section
# presumably sits inside a model_type dispatch branch elsewhere in the file --
# confirm its indentation against the surrounding if/elif chain when merging.
test_X = test_X.reshape(test_X.shape[0], n_lag, features)
# design network
# In a stateful model one should put batch_size into the input shape, and the
# same value must also be passed as batch_size to fit() below.
model = Sequential()
model.add(
    LSTM(512,
         batch_input_shape=(batch_size, train_X.shape[1], train_X.shape[2]),
         activation='tanh',
         stateful=True,
         return_sequences=False))
model.add(Dropout(0.2))
model.add(Dense(n_seq))
model.add(Activation('linear'))
#model.add(LeakyReLU())
model.compile(loss='mse', optimizer='adam')
# fit network; shuffle=False and manual reset_states() are required for a
# stateful LSTM so batch order (and carried state) stays meaningful.
model.fit(train_X, train_y,
          epochs=1,
          batch_size=batch_size,
          validation_data=(test_X, test_y),
          verbose=2,
          shuffle=False)
model.reset_states()
# plot history
# NOTE(review): to re-enable the plots below, capture the return value first:
# history = model.fit(...)
#pyplot.plot(history.history['loss'], label='train')
#pyplot.plot(history.history['val_loss'], label='test')
#pyplot.legend()
#pyplot.show()
print('Train size: (%d x %d)' % (X_train.shape[0], X_train.shape[1])) print('Test size: (%d x %d)' % (X_test.shape[0], X_test.shape[1])) regressor = SVR(kernel='rbf') regressor.fit(X_train, y_train) y_pred = regressor.predict(X_test) r2_test = mean_squared_error(y_test, y_pred) K.clear_session() model = Sequential() model.add( Dense(50, input_shape=(X_test.shape[1], ), activation='relu', kernel_initializer='lecun_uniform')) model.add(Dense(50, input_shape=(X_test.shape[1], ), activation='relu')) model.add(Dense(1)) model.compile(optimizer=Adam(lr=0.001), loss='mean_squared_error') model.fit(X_train, y_train, batch_size=12, epochs=24, verbose=0) y_pred = model.predict(X_test) print('R-Squared: %f' % (mean_squared_error(y_test, y_pred))) plt.figure(figsize=(16, 8)) plt.plot(sc.inverse_transform(y_test), label='Resampled') plt.plot(sc.inverse_transform(y_pred), label='Forecast') plt.legend(loc='best') plt.show() plot = False else: sys.exit("Error: Invalid model '" + model_type + "' specified!") if plot: plt.figure(figsize=(16, 8)) plt.plot(heart_rate, label='Original')