        #data2 = scaler1.fit_transform(data)
        # Keep the flattened intermediate activations computed on the training set.
        nn_model_ref2.all_intermediaire = data_train
        #nn_model_ref2.outputs_proba_train = np.array(nn_model_ref2.outputs_proba[phase]).astype(np.float16).reshape(num1 * nn_model_ref2.batch_size, -1)
        #nn_model_ref2.outputs_pred_train = np.array(nn_model_ref2.outputs_pred[phase]).astype(np.float16).reshape(num1 * nn_model_ref2.batch_size, -1)
        #if not nn_model_ref2.args.retrain_nn_ref:
        #del nn_model_ref2.all_intermediaire, data_train

    else:
        #scaler2 = StandardScaler()
        #data = data_val
        #data = np.array(nn_model_ref2.intermediaires[phase]).astype(np.uint8).reshape(num1 * nn_model_ref2.batch_size, -1)
        #data2 = scaler2.fit_transform(data)
        # Keep the flattened intermediate activations computed on the validation set.
        nn_model_ref2.all_intermediaire_val = data_val
        #nn_model_ref2.outputs_proba_val = np.array(nn_model_ref2.outputs_proba[phase]).astype(np.float16).reshape(num2 * nn_model_ref2.batch_size, -1)
        #nn_model_ref2.outputs_pred_val = np.array(nn_model_ref2.outputs_pred[phase]).astype(np.float16).reshape(
        #    num2 * nn_model_ref2.batch_size, -1)
        #if not nn_model_ref2.args.retrain_nn_ref:
        #del nn_model_ref2.all_intermediaire_val, data_val
        del nn_model_ref2.dataloaders[phase]
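# For each of the four prediction groups, keep only the samples that the reference
# model assigned to that group and retrain/evaluate a binary network on this subset.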

for i in range(4):
    # Report how many training samples fall into prediction group i.
    print(data_train_X[data_train_preds == i].shape)
    print(data_train_labels[data_train_preds == i].shape)

    nn_model_ref2.X_train_nn_binaire = data_train_X[data_train_preds == i]
    nn_model_ref2.X_val_nn_binaire = data_val_X[data_val_preds == i]
    nn_model_ref2.Y_train_nn_binaire = data_train_labels[data_train_preds == i]
    nn_model_ref2.Y_val_nn_binaire = data_val_labels[data_val_preds == i]
    nn_model_ref2.train_general(name_input)
    nn_model_ref2.choose_model()
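    # Evaluate the reference network at the current sparsity level: the full
    # evaluation pass is run only once (flag2), lighter re-evaluations afterwards,
    # and the accuracy reached is recorded in acc_retain.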
    if flag2:
        nn_model_ref.eval_all(["val"])
        flag2 = False
    else:
        nn_model_ref.eval(["val"])
    acc_retain.append(nn_model_ref.acc)

    if args.save_model_prune:
        # Save the weights of the network pruned at the current sparsity level
        # (the 'epoch' and 'acc' fields are placeholders).
        torch.save(
            {'epoch': 0, 'acc': 0, 'state_dict': nn_model_ref.net.state_dict()},
            os.path.join(
                path_save_model,
                'Gohr_' + nn_model_ref.args.type_model
                + '_best_nbre_sampletrain_' + str(nn_model_ref.args.nbre_sample_train)
                + '_prunning_' + str(global_sparsity) + '.pth'))
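        # NOTE (illustration only, not part of the original script): a checkpoint saved
        # this way can later be restored with, for example,
        #   ckpt = torch.load(checkpoint_path)
        #   nn_model_ref.net.load_state_dict(ckpt['state_dict'])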


    # Rebuild the architecture from scratch and reload the reference weights before
    # the next pruning level is evaluated.
    del nn_model_ref.net
    nn_model_ref.net = nn_model_ref.choose_model()
    nn_model_ref.load_nn()


# Plot the accuracy retained at each pruning ratio.
fig = plt.figure(figsize=(20, 20))
ax = plt.axes()
ax.plot(args.values_prunning, acc_retain)
plt.title("model: " + args.model_to_prune)
plt.xlabel("Pruning percentage")
plt.ylabel("Accuracy")
plt.savefig(path_save_model + "prunning" + args.model_to_prune.replace('/', "_") + ".png")
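

# --------------------------------------------------------------------------------
# Illustration only (not part of the original pipeline): a minimal sketch of how a
# global unstructured pruning step at a given `global_sparsity` could be applied to
# the network before the retrain/evaluate loop above. The helper name and the
# assumption that all Conv1d/Linear layers are pruned are hypothetical; the actual
# pruning performed elsewhere in this repository may differ.
def apply_global_pruning_sketch(net, global_sparsity):
    import torch
    import torch.nn.utils.prune as prune
    # Collect the weight tensors of every convolutional and fully connected layer.
    parameters_to_prune = [
        (module, "weight")
        for module in net.modules()
        if isinstance(module, (torch.nn.Conv1d, torch.nn.Linear))
    ]
    # Zero out the globally smallest weights (by L1 magnitude) until the requested
    # fraction of parameters is pruned; `global_sparsity` is expected in [0, 1].
    prune.global_unstructured(
        parameters_to_prune,
        pruning_method=prune.L1Unstructured,
        amount=global_sparsity,
    )
    # Make the pruning permanent so that state_dict() stores plain 'weight' tensors,
    # matching the checkpoint format saved by torch.save above.
    for module, name in parameters_to_prune:
        prune.remove(module, name)
    return net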