from isanet.neural_network import MLPRegressor
from isanet.model import Mlp
from isanet.optimizer import SGD
from isanet.utils.model_utils import printMSE, printAcc, plotMse, save_data, load_data
from isanet.optimizer import EarlyStopping
from isanet.model_selection import Kfold, GridSearchCV

import numpy as np

dataset = np.genfromtxt('../dataset/cup10/ML-CUP19-TR_tr_vl_10.csv',
                        delimiter=',')
split = load_data("../dataset/cup10/4folds.index")   # precomputed 4-fold CV indices
X_train = dataset[:, :-2]   # all columns but the last two are input features
Y_train = dataset[:, -2:]   # the last two columns are the regression targets
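# Note: the Kfold class imported above could presumably generate this split on
# the fly instead of loading precomputed indices (API sketch, not verified):
# split = Kfold(n_splits=4).split(X_train)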

es = EarlyStopping(0.009, 200) # eps_GL and s_UP

mlp_r = MLPRegressor(X_train.shape[1], Y_train.shape[1])   # input and output dimensions

grid = {
    "n_layer_units": [[80], [100]],
    "learning_rate": [0.03, 0.06, 0.08, 0.098],
    "max_epoch": [30000],
    "momentum": [0.2, 0.4, 0.6, 0.8, 0.9],
    "nesterov": [True],
    "kernel_regularizer": [0.0001, 0.0005, 0.0009, 0.0013],
    "activation": ["sigmoid"],
    "early_stop": [es],
}
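# The grid above spans 2 * 4 * 1 * 5 * 1 * 4 * 1 * 1 = 160 configurations;
# with the 4-fold split, the search fits 160 * 4 = 640 models in total.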
gs = GridSearchCV(estimator=mlp_r, param_grid=grid, cv=split, verbose=1)
result = gs.fit(X_train, Y_train)
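# Sketch of persisting the search results with the save_data helper imported
# above (hypothetical output path; the exact structure of `result` depends on
# isanet's GridSearchCV):
# save_data(result, "../results/cup10_grid_search.index")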
# ----------------------------------------------------------------------
# Second example: build and train an Mlp by hand on the MONK-1 dataset
# ----------------------------------------------------------------------
from isanet.datasets.monk import load_monk   # assumed import path for the MONK loader

X_train, Y_train = load_monk("1", "train")
X_test, Y_test = load_monk("1", "test")

# Create the model
model = Mlp()
# Specify the range for the initial weights and the lambda for regularization.
# Of course, these can be different for each layer.
kernel_initializer = 0.003
kernel_regularizer = 0.001

# Add the layers, each with its own number of units
model.add(4, input=17, kernel_initializer=kernel_initializer, kernel_regularizer=kernel_regularizer)
model.add(1, kernel_initializer=kernel_initializer, kernel_regularizer=kernel_regularizer)
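# The resulting network is 17 -> 4 -> 1: MONK's six categorical attributes are
# one-hot encoded into 17 binary inputs, and the single output unit gives the class.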

es = EarlyStopping(0.00009, 20) # eps_GL and s_UP
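# Hedged reading of the two arguments: eps_GL is a generalization-loss
# threshold and s_UP a patience measured in successive validation checks
# (Prechelt-style stopping criteria; the exact semantics depend on isanet's
# EarlyStopping implementation).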

# Choose which optimizer to use in the learning phase
model.setOptimizer(
    SGD(lr = 0.83,          # learning rate
        momentum = 0.9,     # alpha for the momentum
        nesterov = True,    # Specify if you want to use Nesterov
        sigma = None        # sigma for the accelerated Nesterov variant
    ))
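# With nesterov=True the gradient is evaluated at the look-ahead point
# (current weights plus momentum times the previous velocity) rather than
# at the current weights, as in classical momentum.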

# Start the learning phase
model.fit(X_train,
          Y_train, 
          epochs=600, 
          #batch_size=31,   # uncomment to use mini-batches of 31 samples instead of full batch
          validation_data = [X_test, Y_test],