def main(): dataset = PreProcessing("wine_dataset.txt") dataset.normalize(ignore_first_column=True) dataset.switch_first_last_column() dataset.normalize_class() train, test = training.holdout(0.7, dataset.normalized_dataframe) nn = Mlp(13, 10, 3, n_hidden_layers=1) nn.backpropagation(train.values.tolist(), eta=0.5) example = test.values.tolist() print(len(example)) input() #print(example) #print(example[17]) #feed example nn.feed_forward(example[0][:(-1 * 3)]) print(example[0]) nn.show_class() nn.feed_forward(example[40][:(-1 * 3)]) print(example[40]) print(test.iloc[[40]].values.tolist()) input() nn.show_class() nn.feed_forward(example[31][:(-1 * 3)]) print(example[31]) nn.show_class() print(training.accuracy(nn, test, n_classes=3)) """
def wine_test():
    # Load and normalize the wine dataset (class column moved to the end,
    # then encoded for the network).
    dataset = PreProcessing("wine_dataset.txt")
    dataset.normalize(ignore_first_column=True)
    dataset.switch_first_last_column()
    dataset.normalize_class()
    # Hyper-parameters swept by the grid search below.
    n_layers = [1, 2]
    hidden_layer = [10, [5, 5]]
    momentums = [0.3, 0.5, 0.7]
    max_iterations = [100, 250, 500]
    etas = [0.3, 0.5, 0.7]
    ps = [0.5, 0.7, 0.9]
    # Grid search over every combination of layers/momentum/eta/iteration
    # cap/holdout ratio, printing accuracy and two sample predictions each.
    # NOTE(review): `momentum` is printed but never passed to
    # backpropagation — confirm whether Mlp supports a momentum argument.
    for layer in n_layers:
        for momentum in momentums:
            for eta in etas:
                for max_iteration in max_iterations:
                    for p in ps:
                        # Fresh holdout split per combination.
                        train, test = training.holdout(
                            p, dataset.normalized_dataframe)
                        example = test.values.tolist()
                        print("INPUT NEURONS = 13 HIDDEN NEURONS = " +
                              str(int(10 / layer)) +
                              " OUTPUT NEURONS = 3 HIDDEN LAYER = " +
                              str(layer) + " ETA = " + str(eta) +
                              " MAX ITERATIONS = " + str(max_iteration) +
                              " MOMENTUM = " + str(momentum) + " P = " +
                              str(p))
                        print()
                        # hidden_layer[0] is an int (one layer); [1] is a
                        # per-layer list (two layers).
                        nn = Mlp(13,
                                 hidden_layer[layer - 1],
                                 3,
                                 n_hidden_layers=layer)
                        nn.backpropagation(train.values.tolist(),
                                           eta=eta,
                                           max_iterations=max_iteration)
                        print("ACCURACY =",
                              training.accuracy(nn, test, n_classes=3))
                        print()
                        # Two sample predictions; the last 3 columns are the
                        # encoded class labels, stripped before feeding.
                        print("Input 1")
                        nn.feed_forward(example[0][:(-1 * 3)])
                        print(example[0])
                        print("Result 1")
                        nn.show_class()
                        print()
                        print("Input 2")
                        print(example[15])
                        nn.feed_forward(example[15][:(-1 * 3)])
                        print("Result 2")
                        nn.show_class()
                        print()
                        print(
                            "******************************************************//******************************************************"
                        )
                        print()
def music_test():
    """Grid-search an MLP regressor on the music (1059 tracks) dataset.

    Sweeps hidden-layer count, momentum, eta, iteration cap and holdout
    ratio, printing the squared error and two sample predictions for each
    combination.
    """
    # Load and normalize the music dataset.
    dataset = PreProcessing("default_features_1059_tracks.txt")
    dataset.normalize(ignore_first_column=False)
    # Hyper-parameters swept by the grid search below.
    n_layers = [1, 2]
    hidden_layer = [20, [10, 10]]
    momentums = [0.3, 0.5, 0.7]
    max_iterations = [100, 250, 500]
    etas = [0.3, 0.5, 0.7]
    ps = [0.5, 0.7, 0.9]
    # NOTE(review): `momentum` is printed but never passed to
    # backpropagation — confirm whether Mlp supports a momentum argument.
    for layer in n_layers:
        for momentum in momentums:
            for eta in etas:
                for max_iteration in max_iterations:
                    for p in ps:
                        # Fresh holdout split per combination.
                        train, test = training.holdout(
                            p, dataset.normalized_dataframe)
                        example = test.values.tolist()
                        # BUGFIX: the header previously printed
                        # int(10 / layer) hidden neurons, but the network is
                        # built with 20 (one layer) or 10+10 (two layers).
                        print("INPUT NEURONS = 68 HIDDEN NEURONS = " +
                              str(int(20 / layer)) +
                              " OUTPUT NEURONS = 2 HIDDEN LAYER = " +
                              str(layer) + " ETA = " + str(eta) +
                              " MAX ITERATIONS = " + str(max_iteration) +
                              " MOMENTUM = " + str(momentum) + " P = " +
                              str(p))
                        print()
                        # hidden_layer[0] is an int (one layer); [1] is a
                        # per-layer list (two layers).
                        nn = Mlp(68,
                                 hidden_layer[layer - 1],
                                 2,
                                 n_hidden_layers=layer)
                        nn.backpropagation(train.values.tolist(),
                                           eta=eta,
                                           max_iterations=max_iteration)
                        print("SQUARED ERROR =",
                              training.squared_error(nn, test, n_classes=2))
                        print()
                        # Two sample predictions; the last 2 columns are the
                        # regression targets, stripped before feeding.
                        print("Input 1")
                        nn.feed_forward(example[0][:(-1 * 2)])
                        print(example[0])
                        print("Result 1")
                        nn.show_class()
                        print()
                        print("Input 2")
                        print(example[15])
                        nn.feed_forward(example[15][:(-1 * 2)])
                        print("Result 2")
                        nn.show_class()
                        print()
                        print(
                            "******************************************************//******************************************************"
                        )
                        print()
def main():
    """Compare an RBF network against an MLP on the seeds dataset."""
    # Read and normalize the seeds data (whitespace-separated file).
    seeds = PreProcessing("seeds_dataset.txt", separator='\s+')
    seeds.normalize()
    seeds.normalize_class()

    # 70/30 holdout split shared by both models.
    train, test = training.holdout(0.7, seeds.normalized_dataframe)

    # Radial-basis-function network: 7 inputs, 3 classes.
    rbf_net = Rbf(7, 3)
    rbf_net.train(train, eta=0.5, max_iterations=500)
    print("RBF:", training.accuracy(rbf_net, test, 3))

    # Multilayer perceptron: 7 inputs, 3 hidden neurons, 3 classes.
    mlp_net = Mlp(7, 3, 3)
    mlp_net.backpropagation(train.values.tolist(), max_iterations=500)
    print("MLP:", training.accuracy(mlp_net, test, 3))
def seed_test():
    """Grid-search comparison of RBF vs. MLP classifiers on the seeds dataset.

    For every hyper-parameter combination, trains both networks on a fresh
    holdout split, prints their accuracies and one sample prediction each,
    then reports the mean accuracy of each model across all runs.
    """
    # Load and normalize the seeds dataset (the original comment said
    # "wine", but the file loaded here is the seeds dataset).
    dataset = PreProcessing("seeds_dataset.txt", separator='\s+')
    dataset.normalize()
    dataset.normalize_class()
    # Hyper-parameters swept by the grid search below.
    n_layers = [1, 2]
    hidden_layer = [3, [6, 6]]
    momentums = [0.3, 0.5]
    max_iterations = [100, 250, 500]
    etas = [0.3, 0.5]
    ps = [0.7, 0.9]
    # Running totals for the final mean-accuracy report.
    rbf_accuracy = 0
    mlp_accuracy = 0
    tests = 0
    # NOTE(review): `momentum` is printed but never passed to
    # backpropagation — confirm whether Mlp supports a momentum argument.
    for layer in n_layers:
        for momentum in momentums:
            for eta in etas:
                for max_iteration in max_iterations:
                    for p in ps:
                        tests += 1
                        print("Test number", tests)
                        train, test = training.holdout(
                            p, dataset.normalized_dataframe)
                        # BUGFIX: the header previously printed
                        # int(6 / layer) hidden neurons; report the real
                        # topology from hidden_layer instead.
                        print("INPUT NEURONS = 7 HIDDEN NEURONS = " +
                              str(hidden_layer[layer - 1]) +
                              " OUTPUT NEURONS = 3 HIDDEN LAYER = " +
                              str(layer) + " ETA = " + str(eta) +
                              " MAX ITERATIONS = " + str(max_iteration) +
                              " MOMENTUM = " + str(momentum) + " P = " +
                              str(p))
                        print()
                        print("RBF")
                        # NOTE(review): the RBF keeps a fixed eta=0.5; only
                        # the MLP sweeps eta — confirm this is intended.
                        nn = Rbf(7, 3)
                        nn.train(train, eta=0.5, max_iterations=max_iteration)
                        ac = training.accuracy(nn, test, 3)
                        rbf_accuracy += ac
                        print("ACCURACY =", ac)
                        print()
                        print("MLP")
                        example = test.values.tolist()
                        mm = Mlp(7,
                                 hidden_layer[layer - 1],
                                 3,
                                 n_hidden_layers=layer)
                        mm.backpropagation(train.values.tolist(),
                                           eta=eta,
                                           max_iterations=max_iteration)
                        ac = training.accuracy(mm, test, n_classes=3)
                        mlp_accuracy += ac
                        print("ACCURACY =", ac)
                        print()
                        # One sample prediction per network; the last 3
                        # columns are the encoded labels, stripped first.
                        print("Rbf:")
                        nn.feed_forward(example[15][:(-1 * 3)])
                        print(example[15])
                        print("Result 1")
                        nn.show_class()
                        print()
                        print("Mlp")
                        print(example[15])
                        # BUGFIX: previously fed the RBF (`nn`) here while
                        # showing the MLP's class — feed the MLP instead so
                        # show_class() reflects this example.
                        mm.feed_forward(example[15][:(-1 * 3)])
                        print("Result 2")
                        mm.show_class()
                        print()
                        print(
                            "******************************************************//******************************************************"
                        )
                        print()
    print(tests, " tests executed. Rbf accuracy:", rbf_accuracy / tests,
          " Mlp accuracy:", mlp_accuracy / tests)