Code Example #1
from tensorflow.keras.callbacks import EarlyStopping


def evaluate_model(Model, X_Train_REFINED, Y_Train, X_Val_REFINED,
                   Y_Validation, X_Test_REFINED, Y_Test):
    # Stop training once the validation loss has not improved for 30 epochs
    ES = EarlyStopping(monitor='val_loss', mode='min', verbose=0, patience=30)
    History = Model.fit(X_Train_REFINED,
                        Y_Train,
                        batch_size=128,
                        epochs=250,
                        verbose=0,
                        validation_data=(X_Val_REFINED, Y_Validation),
                        callbacks=[ES])
    # Evaluate on the held-out test set; NRMSE is a project helper that
    # returns the normalized RMSE and the R^2 score
    y_pred = Model.predict(X_Test_REFINED)
    CNN_NRMSE, CNN_R2 = NRMSE(Y_Test, y_pred)
    print('NRMSE > %.3f' % (CNN_NRMSE))
    return CNN_NRMSE, History
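
The function above assumes that an NRMSE helper and a compiled Keras model already exist in the calling scope. The sketch below shows one way to drive it end to end; the toy model, the random data, and the NRMSE implementation are illustrative assumptions rather than part of the original code.

import numpy as np
from sklearn.metrics import r2_score
from tensorflow.keras import Input
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense


def NRMSE(y_true, y_pred):
    # Assumed helper: RMSE normalized by the standard deviation of the targets,
    # plus the R^2 score (the original implementation is not shown here)
    y_true = np.asarray(y_true).reshape(-1)
    y_pred = np.asarray(y_pred).reshape(-1)
    rmse = np.sqrt(np.mean((y_true - y_pred) ** 2))
    return rmse / np.std(y_true), r2_score(y_true, y_pred)


# Toy regression data standing in for the REFINED feature images
rng = np.random.default_rng(0)
X_Train, X_Val, X_Test = (rng.normal(size=(n, 64)) for n in (512, 128, 128))
Y_Train, Y_Val, Y_Test = (x.sum(axis=1, keepdims=True) for x in (X_Train, X_Val, X_Test))

# Minimal stand-in regressor (the original work trains a REFINED-CNN)
model = Sequential([Input(shape=(64,)), Dense(32, activation='relu'), Dense(1)])
model.compile(optimizer='adam', loss='mse')

nrmse, history = evaluate_model(model, X_Train, Y_Train, X_Val, Y_Val, X_Test, Y_Test)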
Code Example #2
        # NOTE: this excerpt begins inside the per-model training loop and assumes
        # numpy (np), pandas (pd), scipy.stats.pearsonr, sklearn's
        # mean_absolute_error, and the project helpers NRMSE and Bias_Calc are
        # imported at module level. The model.fit() call is truncated in the
        # original excerpt; the arguments below are assumed to mirror
        # Code Example #1 (only callbacks=[ES] appears in the source).
        CNN_History = model.fit(CNN_Train,
                                Y_Train,
                                batch_size=128,
                                epochs=250,
                                verbose=0,
                                validation_data=(CNN_Val, Y_Validation),
                                callbacks=[ES])
        # Predictions on the validation and test REFINED images
        Y_Val_Pred_CNN = model.predict(CNN_Val, batch_size=128, verbose=0)
        Y_Pred_CNN = model.predict(CNN_Test, batch_size=128, verbose=0)

        # Store this model's predictions in the corresponding output column
        Y_Val_Save[:, cnt + 1] = Y_Val_Pred_CNN.reshape(-1)
        Y_Test_Save[:, cnt + 1] = Y_Pred_CNN.reshape(-1)

        # print(model.summary())
        # Plot the training history
        # plt.plot(CNN_History.history['loss'], label='train')
        # plt.plot(CNN_History.history['val_loss'], label='Validation')
        # plt.legend()
        # plt.show()

        # Measure the REFINED-CNN performance (NRMSE, MAE, PCC, R2, Bias)
        CNN_NRMSE, CNN_R2 = NRMSE(Y_Test, Y_Pred_CNN)
        MAE = mean_absolute_error(Y_Test, Y_Pred_CNN)
        print(CNN_NRMSE, "NRMSE of " + modell + SEL_CEL)
        print(CNN_R2, "R2 of " + modell + SEL_CEL)
        Y_Test = np.reshape(Y_Test, Y_Pred_CNN.shape)
        CNN_ER = Y_Test - Y_Pred_CNN
        # pearsonr expects 1-D inputs, so flatten both vectors
        CNN_PCC, p_value = pearsonr(Y_Test.reshape(-1), Y_Pred_CNN.reshape(-1))

        print(CNN_PCC, "PCC of " + modell + SEL_CEL)
        Y_Validation = Y_Validation.reshape(len(Y_Validation), 1)
        Y_Test = Y_Test.reshape(len(Y_Test), 1)
        # Bias_Calc is a project helper that quantifies the prediction bias
        Bias = Bias_Calc(Y_Test, Y_Pred_CNN)

        Results_Data[0, :] = [CNN_NRMSE, MAE, CNN_PCC, CNN_R2, Bias]
        cnt += 1
    # The original call is truncated here; column names follow the metric
    # order stored in Results_Data above
    Results = pd.DataFrame(data=Results_Data,
                           columns=['NRMSE', 'MAE', 'PCC', 'R2', 'Bias'])
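
Both examples rely on project helpers that the excerpts do not include: NRMSE (a toy version is sketched after Code Example #1) and Bias_Calc. The original Bias_Calc is not shown; the sketch below is one plausible reading, assuming the reported bias is the slope of the prediction error regressed on the true response.

import numpy as np
from scipy.stats import linregress


def Bias_Calc(y_true, y_pred):
    # Hypothetical implementation: regress the prediction error on the true
    # values; a slope near zero means the error does not grow or shrink
    # systematically with the target magnitude
    y_true = np.asarray(y_true).reshape(-1)
    y_pred = np.asarray(y_pred).reshape(-1)
    error = y_true - y_pred
    slope, intercept, r_value, p_value, std_err = linregress(y_true, error)
    return slope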