Example 1
reduced_fit_times = {'PCA': [], 'RCA': []}
reduced_predict_times = {'PCA': [], 'RCA': []}

# For each iteration: load and split the data, score a baseline NN on the original
# features, then score it again on dimensionality-reduced features.

for a in range(num_iter):

    x_train, x_test, y_train, y_test = data_service.load_and_split_data(
        scale_data=scale_data, transform_data=transform_data, random_slice=random_slice,
        random_seed=random_seed, dataset=dataset, test_size=test_size)

    if dataset == 'kdd':
        # collapse the multi-class KDD labels into a binary target
        y_train, y_test = convert_to_binary_service.convert(y_train, y_test, 11)

    nn_learner = NNLearner(hidden_layer_sizes=nn_hidden_layer_sizes, max_iter=200, solver=nn_solver,
                           activation=nn_activation,
                           alpha=alpha, learning_rate=nn_learning_rate, learning_rate_init=nn_learning_rate_init)

    nn_accuracy_score, nn_fit_time, nn_predict_time = nn_learner.fit_predict_score(x_train.copy(), y_train.copy(),
                                                                                   x_test.copy(), y_test.copy())
    original_accuracies.append(nn_accuracy_score)
    original_non_reduced_fit_times.append(nn_fit_time)
    original_non_reduced_predict_times.append(nn_predict_time)

    print("Iter {0}. Orig score: {1}, fit_time: {2}, predict_time: {3}".format(a, nn_accuracy_score, nn_fit_time, nn_predict_time))

    for reduction_algo in reduction_algos:

        nn_learner = NNLearner(hidden_layer_sizes=nn_hidden_layer_sizes, max_iter=200, solver=nn_solver,
                               activation=nn_activation,
                               alpha=alpha, learning_rate=nn_learning_rate,
                               learning_rate_init=nn_learning_rate_init)
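
The inner loop is cut off here in the listing; the remainder of its body presumably reduces the features with the selected algorithm and records the timings in the dictionaries defined above. A minimal sketch of that step, assuming the 'PCA' and 'RCA' keys map to scikit-learn's PCA and GaussianRandomProjection and that a fixed component count is acceptable (the mapping, n_components value, and variable names are illustrative, not taken from the source):

from sklearn.decomposition import PCA
from sklearn.random_projection import GaussianRandomProjection

# Illustrative mapping for the keys used in reduced_fit_times / reduced_predict_times;
# in the actual script the imports would sit at the top of the file and this body inside the inner loop.
reducers = {'PCA': PCA(n_components=10), 'RCA': GaussianRandomProjection(n_components=10)}

reducer = reducers[reduction_algo]
x_train_red = reducer.fit_transform(x_train)   # fit the reducer on the training features only
x_test_red = reducer.transform(x_test)         # apply the same projection to the test features

red_score, red_fit_time, red_predict_time = nn_learner.fit_predict_score(
    x_train_red, y_train.copy(), x_test_red, y_test.copy())
reduced_fit_times[reduction_algo].append(red_fit_time)
reduced_predict_times[reduction_algo].append(red_predict_time)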
Example 2
dataset = 'breast_cancer'
test_size = 0.5

nn_activation = 'relu'
alpha = 0.0001
nn_hidden_layer_sizes = (10, )
nn_learning_rate = 'constant'
nn_learning_rate_init = 0.01
nn_solver = 'lbfgs'

#{'activation': 'relu', 'alpha': 0.0001, 'hidden_layer_sizes': (100,), 'learning_rate_init': 0.01, 'solver': 'lbfgs'}

nn_learner = NNLearner(hidden_layer_sizes=nn_hidden_layer_sizes,
                       max_iter=200,
                       solver=nn_solver,
                       activation=nn_activation,
                       alpha=alpha,
                       learning_rate=nn_learning_rate,
                       learning_rate_init=nn_learning_rate_init)

x_train, x_test, y_train, y_test = data_service.load_and_split_data(
    scale_data=scale_data,
    transform_data=transform_data,
    random_slice=random_slice,
    random_seed=random_seed,
    dataset=dataset,
    test_size=test_size)

nn_accuracy_score, nn_fit_time, nn_predict_time = nn_learner.fit_predict_score(
    x_train, y_train, x_test, y_test)
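
For readers without the project's NNLearner and data_service wrappers, a rough scikit-learn-only equivalent of this example could look as follows. MLPClassifier, train_test_split and load_breast_cancer are standard scikit-learn APIs; the exact preprocessing performed by load_and_split_data (scaling, slicing, seeding) is an assumption and is omitted here.

from time import time
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import accuracy_score

X, y = load_breast_cancer(return_X_y=True)
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=0)

clf = MLPClassifier(hidden_layer_sizes=(10,), max_iter=200, solver='lbfgs',
                    activation='relu', alpha=0.0001,
                    learning_rate='constant', learning_rate_init=0.01)

start = time()
clf.fit(x_train, y_train)           # roughly corresponds to nn_fit_time above
fit_time = time() - start

start = time()
y_pred = clf.predict(x_test)        # roughly corresponds to nn_predict_time above
predict_time = time() - start

print(accuracy_score(y_test, y_pred), fit_time, predict_time)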
Example 3
from sklearn.multiclass import OneVsRestClassifier

dt_learner = DTLearner()
dt_learnerOnevsRest = OneVsRestClassifier(dt_learner.estimator)
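
OneVsRestClassifier trains one binary copy of the wrapped estimator per class and combines their decisions, which is how the decision tree above can be applied to a multi-class dataset. A minimal usage sketch, assuming a train/test split like the ones produced by load_and_split_data in the other examples:

# Hypothetical usage; x_train, y_train and x_test are assumed to come from an earlier split.
dt_learnerOnevsRest.fit(x_train, y_train)
y_pred = dt_learnerOnevsRest.predict(x_test)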

#--------------------------------
nn_hidden_layer_sizes = (100, )
nn_solver = 'lbfgs'
nn_activation = 'relu'
alpha = 0.0001  # regularization term coefficient
nn_learning_rate = 'constant'
nn_learning_rate_init = 0.0001
nn_learner = NNLearner(hidden_layer_sizes=nn_hidden_layer_sizes,
                       max_iter=200,
                       solver=nn_solver,
                       activation=nn_activation,
                       alpha=alpha,
                       learning_rate=nn_learning_rate,
                       learning_rate_init=nn_learning_rate_init)
nn_learner_non_scaled = NNLearner(hidden_layer_sizes=nn_hidden_layer_sizes,
                                  max_iter=200,
                                  solver=nn_solver,
                                  activation=nn_activation,
                                  alpha=alpha,
                                  learning_rate=nn_learning_rate,
                                  learning_rate_init=nn_learning_rate_init)

#-------------------------------
n_neighbors = 5
weights = 'distance'
algorithm = 'auto'
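
These k-nearest-neighbour parameters presumably feed a KNN learner that is not shown in this excerpt. A minimal sketch of how they would be used with scikit-learn's KNeighborsClassifier directly (the wrapper class, if any, is an assumption):

from sklearn.neighbors import KNeighborsClassifier

# Uses the parameters defined above; fit/predict would follow the same pattern as the other learners.
knn = KNeighborsClassifier(n_neighbors=n_neighbors, weights=weights, algorithm=algorithm)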