# Exemplo n.º 1
def batch_grid(kernel, wine):
    """Run a 5-fold GridSearchCV over an SVR for the given kernel and wine dataset.

    Parameters
    ----------
    kernel : str
        One of 'rbf', 'poly' or 'sigmoid'; selects the hyper-parameter grid.
    wine : str
        Dataset selector forwarded to ``u.get_wine`` / ``u.savegrid``.

    Returns
    -------
    The fitted ``GridSearchCV`` instance.

    Raises
    ------
    ValueError
        If *kernel* is not one of the supported kernels (the original code
        would otherwise crash later with an unbound ``param_grid``).
    """
    if kernel == 'rbf':
        C_range = np.logspace(0, 4, 5)
        gamma_range = np.logspace(-2, 2, 5)
        param_grid = [{'C': C_range, 'gamma': gamma_range, 'kernel': ['rbf']}]
    elif kernel == 'poly':
        C_range = np.logspace(-1, 3, 5)
        gamma_range = np.logspace(-2, 2, 5)
        param_grid = [{
            'C': C_range,
            'gamma': gamma_range,
            'kernel': ['poly'],
            'degree': [4, 5, 6],
            'coef0': [0, 1, 2, 3, 4]
        }]
    elif kernel == 'sigmoid':
        # Sigmoid needs a much larger C range to converge to useful models.
        C_range = np.logspace(3, 7, 5)
        gamma_range = np.logspace(-3, 1, 5)
        param_grid = [{
            'C': C_range,
            'gamma': gamma_range,
            'kernel': ['sigmoid'],
            'coef0': [-1, 0, 1, 2, 3]
        }]
    else:
        # Fail fast instead of hitting a NameError on param_grid below.
        raise ValueError("unsupported kernel: %r (expected 'rbf', 'poly' or 'sigmoid')" % (kernel,))

    df = u.get_wine(wine)
    X_tr_norm, Y_tr, X_te_norm, Y_te = prepare_data(df)

    grid_search = model_selection.GridSearchCV(
        svm.SVR(epsilon=0.2, verbose=False, max_iter=100000),
        param_grid,
        n_jobs=2,
        verbose=2,
        scoring='neg_mean_absolute_error',
        return_train_score=True,
        cv=5)

    t0 = time()

    grid_search.fit(X_tr_norm, Y_tr)

    print("done in %0.3fs" % (time() - t0))
    print("Best score: %0.3f" % grid_search.best_score_)
    print("Best parameters set:")
    best_parameters = grid_search.best_estimator_.get_params()
    print(best_parameters)

    # Persist the full grid-search object for later analysis.
    u.savegrid(grid_search, kernel, wine)

    return grid_search
# Exemplo n.º 2
def grid_search_batch_size(prev_grid, wine):
    """Re-run a 5-fold grid search seeded with the best MLP found so far.

    NOTE(review): the parameter grid below is empty, so only the single
    inherited configuration is evaluated — confirm whether a 'batch_size'
    grid was intended here, given the function name.
    """
    estimator = MLPRegressor(**prev_grid.best_estimator_.get_params())
    X_train, Y_train, X_test, Y_test = prepare_data(u.get_wine(wine))

    search = model_selection.GridSearchCV(
        estimator, {}, return_train_score=True, cv=5,
        scoring='neg_mean_absolute_error', n_jobs=-1, verbose=5)

    start = time()
    search.fit(X_train, Y_train)

    # Persist results before reporting timing/scores, as elsewhere in this file.
    u.savegrid(search, "nn", wine, "batch_size")
    print("done in %0.3fs" % (time() - start))
    print()

    print("Best score: %0.3f" % search.best_score_)
    print("Best parameters set:")
    print(search.best_estimator_.get_params())
    return search
# Exemplo n.º 3
def grid_search_alpha_lr(params, wine):
    """Grid-search the MLP's L2 penalty (alpha) and initial learning rate.

    *params* is expanded into the MLPRegressor constructor; results are
    saved via ``u.savegrid`` under the "alpha_lr" tag and the fitted
    GridSearchCV is returned.
    """
    search_space = {
        'alpha': np.logspace(-5, -3, 100),
        'learning_rate_init': np.linspace(0.1, 1, 10),
    }
    regressor = MLPRegressor(**params)
    X_train, Y_train, X_test, Y_test = prepare_data(u.get_wine(wine))

    search = model_selection.GridSearchCV(
        regressor, search_space, return_train_score=True, cv=5,
        scoring='neg_mean_absolute_error', n_jobs=-1, verbose=1)

    start = time()
    search.fit(X_train, Y_train)

    u.savegrid(search, "nn", wine, "alpha_lr")
    print("done in %0.3fs" % (time() - start))
    print()

    print("Best score: %0.3f" % search.best_score_)
    print("Best parameters set:")
    print(search.best_estimator_.get_params())
    return search
# Exemplo n.º 4
def grid_search_layers(grid_search, wine):
    """Grid-search MLP hidden-layer sizes (and activation) over a 10x10 lattice.

    Parameters
    ----------
    grid_search : GridSearchCV or None
        An existing searcher to reuse (its grid is replaced), or None to
        build a fresh one.
    wine : str
        Dataset selector forwarded to ``u.get_wine`` / ``u.savegrid``.

    Returns
    -------
    The fitted ``GridSearchCV`` instance.
    """
    min_1, max_1 = 10, 20
    min_2, max_2 = 10, 20

    # Build (l1,) or (l1, l2) layer-size tuples; l2 == 0 means "one layer only"
    # (dead with the current min_2 = 10, kept for when the range includes 0).
    tuples = []
    for l1 in np.linspace(min_1, max_1, 10, dtype=int):
        for l2 in np.linspace(min_2, max_2, 10, dtype=int):
            if l2 == 0:
                tuples.append((l1,))
            else:
                tuples.append((l1, l2))

    layers_grid = [{'hidden_layer_sizes': tuples, 'activation': ['relu']}]

    mlp = MLPRegressor(max_iter=10000, solver='adam', tol=1e-4, random_state=999)

    if grid_search is None:
        grid_search = model_selection.GridSearchCV(mlp, layers_grid, return_train_score=True, cv=5,
                                                   scoring='neg_mean_absolute_error', n_jobs=-1, verbose=3)
    else:
        # BUGFIX: the original `set_params(**layers_grid)` tried to **-unpack a
        # list, raising TypeError. The searcher's grid is the `param_grid` param.
        grid_search.set_params(param_grid=layers_grid)
    X_train, Y_train, X_test, Y_test = prepare_data(u.get_wine(wine))

    t0 = time()

    grid_search.fit(X_train, Y_train)
    u.savegrid(grid_search, 'nn', wine, 'activation_layers')

    print("done in %0.3fs" % (time() - t0))
    print()

    print("Best score: %0.3f" % grid_search.best_score_)
    print("Best parameters set:")
    best_parameters = grid_search.best_estimator_.get_params()
    print(best_parameters)
    return grid_search
# Exemplo n.º 5
def grid_searh_NN(wine, model_setup, param_grid, max_epochs=1000, n_cores=-1, verbose=0):
    """Generic 5-fold grid search over an MLPRegressor.

    *model_setup* seeds the estimator, *param_grid* is searched, and the
    result is saved under a tag built from the grid's parameter names.
    Returns the fitted GridSearchCV.
    """
    regressor = MLPRegressor(**model_setup)
    regressor.set_params(max_iter=max_epochs)
    X_train, Y_train, X_test, Y_test = prepare_data(u.get_wine(wine))

    search = model_selection.GridSearchCV(
        regressor, param_grid, return_train_score=True, cv=5,
        scoring='neg_mean_absolute_error', n_jobs=n_cores, verbose=verbose)

    start = time()
    search.fit(X_train, Y_train)

    # Tag the saved grid with the searched parameter names, e.g. "alpha_beta_1_".
    tag = "".join(key + "_" for key in param_grid)
    u.savegrid(search, "nn", wine, tag)
    print("done in %0.3fs" % (time() - start))
    print()

    print("Best score: %0.3f" % search.best_score_)
    print("Best parameters set:")
    print(search.best_estimator_.get_params())
    return search
# Exemplo n.º 6
def grid_search_beta(prev_grid, wine):
    """Grid-search Adam's beta_1/beta_2 around the best MLP found so far.

    The best estimator's parameters are inherited (with max_iter bumped to
    10000); results are saved under the "beta" tag and the fitted
    GridSearchCV is returned.
    """
    inherited = prev_grid.best_estimator_.get_params()
    inherited['max_iter'] = 10000
    regressor = MLPRegressor(**inherited)

    X_train, Y_train, X_test, Y_test = prepare_data(u.get_wine(wine))
    # Betas must stay strictly below 1.0 for Adam; hence the .9999... caps.
    search_space = {
        'beta_1': np.linspace(0.5, 0.9999999999, 20),
        'beta_2': np.linspace(0.51, 0.99999999999, 20),
    }

    search = model_selection.GridSearchCV(
        regressor, search_space, return_train_score=True, cv=5,
        scoring='neg_mean_absolute_error', n_jobs=-1, verbose=5)

    start = time()
    search.fit(X_train, Y_train)

    u.savegrid(search, "nn", wine, "beta")
    print("done in %0.3fs" % (time() - start))
    print()

    print("Best score: %0.3f" % search.best_score_)
    print("Best parameters set:")
    print(search.best_estimator_.get_params())
    return search