def plot2(typ, frac, epochs=2000):
    """Plot running train/val MAE of a finetuned network and annotate the
    epochs at which several reference models are last outperformed.

    Parameters
    ----------
    typ : int
        Pretraining type; selects the ``mlp{typ}`` output directory.
    frac : int
        Simulation fraction; selects the ``sims_frac{frac}`` directory.
    epochs : int, optional
        Number of epochs to display (default 2000).

    Relies on the module-level globals ``fulltab``, ``visualizations``,
    ``np`` and ``plt``.
    """
    # Load the saved running losses of the tuned model.
    # NOTE(review): relative Windows path — assumes the CWD is the user's
    # home directory; confirm before running elsewhere.
    rl = np.load(
        rf"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt\python"
        rf"\outputs\models\mlp{typ}\nodropout\sims_frac{frac}\tuned\setting0"
        rf"\running_losses.npy",
        allow_pickle=True,
    ).item()
    visualizations.plot_running_losses(
        rl["mae_train"][:, :epochs], rl["mae_val"][:, :epochs], False, False
    )

    def _best_mae(mask):
        # Minimum validation MAE among the rows selected by the boolean mask.
        bm = fulltab.loc[mask].reset_index()
        return bm.iloc[bm["mae_val"].idxmin()].to_dict()["mae_val"]

    def _annotate(value, arrow_top, label):
        # Mark the last epoch at which the running val loss still exceeds
        # `value` with a downward arrow and a label just above it.
        pos = np.max(np.where(rl["mae_val"][:, :epochs] > value))
        plt.arrow(x=pos, y=arrow_top, dx=0, dy=-(arrow_top - value),
                  linewidth=0.8, color="gray")
        plt.text(x=pos, y=arrow_top + 0.05, s=label, fontstyle="italic")

    # Reference line: best fully selected network (typ 0, architecture 2).
    bestmlp0 = _best_mae((fulltab.typ == 0) & (fulltab.architecture == 2))
    plt.hlines(bestmlp0, 0, epochs, colors="orange", linestyles="dashed",
               label="Best selected network", linewidth=1.2)

    # OLS-finetuned benchmark.
    bestols = _best_mae((fulltab.typ == typ) & (fulltab.simsfrac == frac)
                        & (fulltab.finetuned_type == "C-OLS"))
    _annotate(bestols, 1.3, "OLS")

    # Feature-extraction benchmark; may not exist for every (typ, frac).
    try:
        bestfw2 = _best_mae((fulltab.typ == typ) & (fulltab.simsfrac == frac)
                            & (fulltab.finetuned_type == "B-fW2"))
        _annotate(bestfw2, 2, "Feature \nextraction")
    except (ValueError, KeyError, IndexError):
        # Empty selection or no loss value below the benchmark.
        print("no fw2.")

    # MLP-finetuned benchmark.
    bestmlp2 = _best_mae((fulltab.typ == typ) & (fulltab.simsfrac == frac)
                         & (fulltab.finetuned_type == "D-MLP2"))
    _annotate(bestmlp2, 1.2, "MLP")

    # PRELES process-model reference (training MAE).
    prel = fulltab.loc[(fulltab.model == "preles")
                       & (fulltab.typ == 0)]["mae_train"].item()
    _annotate(prel, 1.6, "PRELES")

    plt.legend()
                          # NOTE(review): orphaned fragment — the opening of
                          # this call (e.g. "visualizations.predictions(...,")
                          # was lost when the snippet was scraped; restore the
                          # missing line before running this file.
                          r"nodropout\sims_frac100\tuned\setting1\sparse\5")
#%%
# Prediction plots for every trained model / data configuration.
# Order and arguments are identical to the original call sequence.
# NOTE(review): "hyttiala" below is probably a typo for "hyytiala" —
# kept as-is; confirm against the actual output directory names.
for _pred_args in [
    ("mlp", 0, r"noPool\sigmoid"),
    ("mlp", 0, r"AdaptPool\nodropout"),
    ("mlp", 0, r"AdaptPool\dropout"),
    ("mlp", 2, r"hyytiala"),
    ("mlp", 2, r"bilykriz\noPool"),
    ("lstm", 2, r"bilykriz"),
    ("cnn", 2, r"bilykriz"),
    ("lstm", 2, r"hyytiala"),
    ("cnn", 2, r"hyttiala"),
    ("rf", 2),
    ("mlp", 5, r"paramsFix\nodropout"),
    ("mlp", 6, r"paramsFix\nodropout"),
]:
    visualizations.predictions(*_pred_args)

#%%
# Error distributions of the pretrained networks (typ 2).
visualizations.performance_boxplots(typ=2)

#%%
# Plot the cross-validation running losses of the current model.
visualizations.plot_running_losses(running_losses["mae_train"],
                                   running_losses["mae_val"], "mlp")

#%%
# NOTE(review): duplicate of the plot call above — looks like a re-run
# notebook cell that was kept when exporting.
visualizations.plot_running_losses(running_losses["mae_train"],
                                   running_losses["mae_val"], "mlp")
# Cross-validated training; overwrites the losses plotted above.
# NOTE(review): dev_cnn is used but results go to ...\models\mlp6 —
# confirm this pairing is intentional.
running_losses, performance, y_tests, y_preds = dev_cnn.train_model_CV(
    hparams,
    model_design,
    X,
    Y,
    splits,
    eval_set,
    data_dir=os.path.join(data_dir, r"python\outputs\models\mlp6"),
    save=False)

#%%
# Mean CV performance per metric (column meaning not visible here —
# presumably train/val errors; verify against train_model_CV).
print(np.mean(np.array(performance), axis=0))
# [0.10712399 2.9577253  0.07802816 2.1554127 ]
visualizations.plot_running_losses(running_losses["mae_train"],
                                   running_losses["mae_val"],
                                   legend=True,
                                   plot_train_loss=True)

#%%
# First configuration (wide hidden layer, long training). It is
# immediately overwritten by the second definition below; kept to
# preserve the original script's behavior.
hparams = {
    "batchsize": 256,
    "epochs": 8000,
    "history": 1,
    "hiddensize": [128],
    "learningrate": 0.01,
}
model_design = {
    "dimensions": [X.shape[1], 128, Y.shape[1]],
    "activation": nn.ReLU,
    "featuresize": None,
}

# Effective configuration: smaller hidden layer, shorter training,
# two-step history, feature size 7.
hparams = {
    "batchsize": 256,
    "epochs": 3000,
    "history": 2,
    "hiddensize": [32],
    "learningrate": 0.01,
}
model_design = {
    "dimensions": [X.shape[1], 32, Y.shape[1]],
    "activation": nn.ReLU,
    "featuresize": 7,
}

#eval_set = {"X_test":X_test, "Y_test":Y_test}
# Evaluate on the held-out subset of the simulated data.
eval_set = {"X_test": X_sims_ss_test, "Y_test": Y_sims_ss_test}

# Cross-validated training on the (subsampled) simulations.
running_losses, performance, y_tests, y_preds = dev_mlp.train_model_CV(
    hparams,
    model_design,
    X_sims_ss,
    Y_sims_ss,
    eval_set,
    0.2,
    data_dir,
    False,
)

visualizations.plot_running_losses(
    running_losses["mae_train"], running_losses["mae_val"], True, True
)

#%%
# CNN configuration: 10-day history window, two conv channel widths,
# kernel size 2.
hparams = {
    "batchsize": 512,
    "epochs": 500,
    "history": 10,
    "hiddensize": 64,
    "learningrate": 0.01,
}
model_design = {
    "dimensions": [X.shape[1], 64, Y.shape[1]],
    "activation": nn.ReLU,
    "channels": [14, 28],
    "kernelsize": 2,
}

# Five CV folds, no separate evaluation set.
splits = 5
eval_set = None
# Exemple #5 (scraped snippet-site separator)
# 0 (scraped vote count — not code)
                                     # NOTE(review): orphaned fragment — the
                                     # opening line (presumably
                                     # "running_losses_d1p2 = training.train(hparams_setting, model_design,")
                                     # was lost when this snippet was scraped;
                                     # restore it before running.
                                     X_P2.to_numpy(), Y_P2.to_numpy(), "D1P2")
# Fit the network to the PRELES-simulated targets for both periods
# (naming: "D2" = second data set, "P1"/"P2" = period 1/2).
running_losses_d2p1 = training.train(hparams_setting, model_design,
                                     X_P1.to_numpy(), Y_Preles_P1.to_numpy(),
                                     "D2P1")
running_losses_d2p2 = training.train(hparams_setting, model_design,
                                     X_P2.to_numpy(), Y_Preles_P2.to_numpy(),
                                     "D2P2")

# Sparse ("s", 365-data-point) variants, currently disabled:
#running_losses_d1p1s = training.train(hparams_setting, model_design, X_P1s.to_numpy(), Y_P1s.to_numpy(), "D1P1s")
#running_losses_d1p2s = training.train(hparams_setting, model_design, X_P2s.to_numpy(), Y_P2s.to_numpy(), "D1P2s")

#running_losses_d2p1s = training.train(hparams_setting, model_design, X_P1s.to_numpy(), Y_Preles_P1s.to_numpy(), "D2P1s")
#running_losses_d2p2s = training.train(hparams_setting, model_design, X_P2s.to_numpy(), Y_Preles_P2s.to_numpy(), "D2P2s")
#%%
# Training curves of the model fitted on D1P1.
# NOTE(review): running_losses_d1p1 is assigned in a part of the script
# that is truncated above — confirm it exists before running this cell.
visualizations.plot_running_losses(running_losses_d1p1["mae_train"],
                                   running_losses_d1p1["mae_val"])
#%%
#visualizations.plot_running_losses(running_losses_d1p1s["mae_val"], running_losses_d1p1["mae_val"], labels = ["365 data points", "730 data points"])
#visualizations.plot_running_losses(running_losses_d2p1s["mae_val"], running_losses_d2p1["mae_val"], labels = ["365 data points", "730 data points"])

#%% Predict with fitted models to D1P2.
# Evaluate both period-fitted models on period-2 data and compare their
# mean absolute errors.
preds_d1m1, mae_d1m1, nse_d1m1 = prediction.predict(hparams_setting,
                                                    model_design,
                                                    X_P2.to_numpy(),
                                                    Y_P2.to_numpy(), "D1P1")
preds_d1m2, mae_d1m2, nse_d1m2 = prediction.predict(hparams_setting,
                                                    model_design,
                                                    X_P2.to_numpy(),
                                                    Y_P2.to_numpy(), "D1P2")
print(np.mean(mae_d1m1))
print(np.mean(mae_d1m2))
# Exemple #6 (scraped snippet-site separator)
# 0 (scraped vote count — not code)
# Quick shape check for 1-D max pooling: kernel 3 with stride 2 maps an
# input length L to floor((L - 3) / 2) + 1, so 50 -> 24.
inp = torch.randn(20, 16, 50)  # (batch, channels, length)
m = nn.MaxPool1d(kernel_size=3, stride=2)
inp.shape  # bare expressions: REPL echoes, no effect when run as a script
outp = m(inp)
outp.shape


#%%
import setup.models as models
import finetuning
import setup.dev_mlp as dev_mlp
import visualizations

# Tensor copies of the current X/Y arrays.
# NOTE(review): x and y are not used anywhere below — possibly leftover.
x = torch.tensor(X).type(dtype=torch.float)
y = torch.tensor(Y).type(dtype=torch.float)

# Load the finetuning setting (overwrites X, Y, X_test, Y_test) and widen
# the input layer to 12 features.
hparams, model_design, X, Y, X_test, Y_test = finetuning.settings("mlp", 5, 4000, data_dir)
model_design["dimensions"] = [12,32,1]

# Simulations with uniformly sampled parameters; keep parameters as features.
X, Y = preprocessing.get_simulations(data_dir = os.path.join(data_dir, f"data/simulations/uniform_params"), drop_parameters=False)

# Use only the first 500 simulated samples.
X, Y = X[:500], Y[:500]     
# Pad the 7-feature test input with random values to reach 12 features.
# NOTE(review): columns 7-11 of the test input are pure noise — confirm
# this is intended.
X_f = np.random.rand(366, 12)
X_f[:,:7] = X_test
X_test = X_f
running_losses, performance, y_tests, y_preds = dev_mlp.train_model_CV(hparams, model_design, 
                                                                       X, Y, {"X_test":X_test, "Y_test":Y_test}, 
                                                                       0.4, data_dir, False)

visualizations.plot_running_losses(running_losses["mae_train"], running_losses["mae_val"], True, True)