Пример #1
0
#############################
# Hyper-parameters (fragment: the imports and the `monk` variable used
# below are defined above this excerpt and are not visible here)
#############################
reg = 1e-4  # Tikhonov/L2 regularization coefficient passed to each layer
seed = 783  # seed for numpy's RNG -> reproducible initial weights w0
ng_eps = 10e-6  # gradient-norm tolerance; NOTE(review): 10e-6 == 1e-5 -- confirm 1e-6 was not intended
l_eps = 1e-6  # loss/line-search tolerance -- exact use not visible in this excerpt
max_iter = 1000  # iteration cap used as a stopping criterion
verbose = 0  # 0 = silent
#############################

#########################################
# Construct the Monk1 objective function
# and define a w0 with the seed
#########################################

np.random.seed(seed=seed)
print("Load Monk DataSet")
X_train, Y_train = load_monk(monk, "train")  # `monk` is assumed to be set earlier in the file -- TODO confirm
print("Build the model")
model = Mlp()
# 17 -> 4 -> 1 architecture; kernel_initializer=0.003 presumably bounds the
# initial-weight range -- verify against Mlp.add
model.add(4, input=17, kernel_initializer=0.003, kernel_regularizer=reg)
model.add(1, kernel_initializer=0.003, kernel_regularizer=reg)

#############################
#          L-BFGS
#############################
c1 = 1e-4  # presumably the Armijo (sufficient-decrease) constant of the line search -- confirm in LBFGS
c2 = .9  # presumably the curvature (Wolfe) constant of the line search -- confirm in LBFGS
m = 30  # memory size: number of past (s, y) pairs kept by L-BFGS
ln_maxiter = 100  # line-search iteration cap
#############################
# NOTE(review): this call is truncated at the end of the excerpt -- the
# remaining keyword arguments are not visible here.
optimizer = LBFGS(m=m,
                  c1=c1,
Пример #2
0
import sys
from os import path
# Make the local `isanet` package importable when the script is run from
# either the examples directory or the repository root.
sys.path.insert(0, "../")
sys.path.insert(0, "./")

from isanet.model import Mlp
from isanet.optimizer import SGD
from isanet.datasets.monk import load_monk
from isanet.utils.model_utils import printMSE, printAcc, plotHistory
import numpy as np
import time

# Load the Monk-2 dataset, train and test splits.
print("Load Monk DataSet")
X_train, Y_train = load_monk("2", "train")
X_test, Y_test = load_monk("2", "test")

print("Build the model")
tk_reg = 0  # Tikhonov/L2 regularization coefficient (disabled; .000001 was tried previously)
w_start = 0.7  # initial-weight value/range passed as kernel_initializer -- TODO confirm semantics in Mlp.add
model = Mlp()
# 17 -> 6 -> 6 -> 1 architecture, same initializer and regularizer on every layer.
model.add(6, input=17, kernel_initializer=w_start, kernel_regularizer=tk_reg)
model.add(6, kernel_initializer=w_start, kernel_regularizer=tk_reg)
model.add(1, kernel_initializer=w_start, kernel_regularizer=tk_reg)

# SGD with Nesterov momentum.
model.set_optimizer(SGD(lr=0.812, momentum=0.8, nesterov=True))
# Batch (full-batch) training, timed with time.time().
# NOTE(review): the fit(...) call is truncated at the end of the excerpt --
# the remaining arguments are not visible here.
start_time = time.time()
model.fit(
    X_train,
    Y_train,
    epochs=800,
Пример #3
0
def optimize_monk_f(monk="1",
                    reg=0,
                    seed=1,
                    optimizer=None,
                    max_iter=1000,
                    verbose=0):
    """Build an objective function from a Monk dataset and optimize it
    with a specific `optimizer`.

    The objective function is built as follows:
                    f_monk = MSE_monk + reg*||w||^2

    Parameters
    ----------
    monk : str
        Which Monk dataset to use. Accepted values:
            "1" : monk1
            "2" : monk2
            "3" : monk3

    reg : float
        Lambda value used in the regularization term.
        Must satisfy 0 <= reg <= 1.

    seed : int
        Seed for numpy's RNG; fixes the starting point w0.

    optimizer : isanet.optimizer
        The optimizer used in the process. Must be passed!

    max_iter : int
        Maximum number of iterations used as a stopping criterion.

    verbose : int [0, 1, 2, 3]
        Verbosity of the process:
            0 : no output
            1 : only iteration output
            2 : iteration and optimizer log at each iteration
            3 : same as above, plus the line search log

    Returns
    -------
    tuple
        (model.history, optimizer.history, elapsed) where `elapsed` is the
        wall-clock duration of the whole fit, in seconds.

    Raises
    ------
    ValueError
        If `monk` is not one of "1", "2", "3"; if `optimizer` is None;
        or if `reg` is outside [0, 1].
    """
    # Validate inputs up front. ValueError is a subclass of Exception, so
    # callers that caught the previous generic Exception still work.
    if monk not in ("1", "2", "3"):
        raise ValueError("wrong monk function - accepted only: '1', '2', '3'")
    if optimizer is None:
        raise ValueError("an optimizer must be specified")
    if not 0 <= reg <= 1:
        # Enforce the constraint stated in the docstring.
        raise ValueError("reg must satisfy 0 <= reg <= 1")

    np.random.seed(seed=seed)
    X_train, Y_train = load_monk(monk, "train")

    # Per-dataset initial-weight settings for the two layers (hidden, output).
    kernel_initializer = {
        "1": [0.003, 0.003],
        "2": [1 / np.sqrt(17), 1 / np.sqrt(4)],
        "3": [0.003, 0.003]
    }
    # 17 -> 4 -> 1 architecture with L2 coefficient `reg` on both layers.
    model = Mlp()
    model.add(4,
              input=17,
              kernel_initializer=kernel_initializer[monk][0],
              kernel_regularizer=reg)
    model.add(1,
              kernel_initializer=kernel_initializer[monk][1],
              kernel_regularizer=reg)

    model.set_optimizer(optimizer)
    start = time.time()
    model.fit(X_train, Y_train, epochs=max_iter, verbose=verbose)
    end = time.time()
    return model.history, optimizer.history, (end - start)