# Example no. 1
# 0
def wine_test():
    """Sweep MLP hyper-parameters on the wine dataset and report results.

    For every combination of hidden-layer configuration, momentum, learning
    rate (eta), iteration cap and holdout fraction, trains an Mlp and prints
    its accuracy plus the predicted class for two sample test rows.
    """
    # Load and normalize the wine data; the class column is moved to the
    # end and normalized so it matches the network's output encoding.
    dataset = PreProcessing("wine_dataset.txt")
    dataset.normalize(ignore_first_column=True)
    dataset.switch_first_last_column()
    dataset.normalize_class()

    # Hyper-parameters varied by the sweep.
    n_layers = [1, 2]
    hidden_layer = [10, [5, 5]]  # one layer of 10 neurons, or two of 5
    momentums = [0.3, 0.5, 0.7]
    max_iterations = [100, 250, 500]
    etas = [0.3, 0.5, 0.7]
    ps = [0.5, 0.7, 0.9]  # holdout train fractions

    # Exhaustive sweep over every combination.
    for layer in n_layers:
        for momentum in momentums:
            for eta in etas:
                for max_iteration in max_iterations:
                    for p in ps:
                        train, test = training.holdout(
                            p, dataset.normalized_dataframe)
                        example = test.values.tolist()
                        print("INPUT NEURONS = 13 HIDDEN NEURONS = " +
                              str(int(10 / layer)) +
                              " OUTPUT NEURONS = 3 HIDDEN LAYER = " +
                              str(layer) + " ETA = " + str(eta) +
                              " MAX ITERATIONS = " + str(max_iteration) +
                              " MOMENTUM = " + str(momentum) + " P = " +
                              str(p))
                        print()
                        nn = Mlp(13,
                                 hidden_layer[layer - 1],
                                 3,
                                 n_hidden_layers=layer)
                        # BUG FIX: momentum was swept (and printed above) but
                        # never forwarded to training, so every momentum
                        # iteration trained with the default value.
                        # NOTE(review): assumes backpropagation accepts a
                        # `momentum` keyword — confirm against Mlp.
                        nn.backpropagation(train.values.tolist(),
                                           eta=eta,
                                           momentum=momentum,
                                           max_iterations=max_iteration)
                        print("ACCURACY =",
                              training.accuracy(nn, test, n_classes=3))
                        print()

                        # Show two concrete predictions; the last 3 columns
                        # hold the class encoding, so strip them before
                        # feeding the network.
                        print("Input 1")
                        nn.feed_forward(example[0][:(-1 * 3)])
                        print(example[0])
                        print("Result 1")
                        nn.show_class()
                        print()

                        print("Input 2")
                        print(example[15])
                        nn.feed_forward(example[15][:(-1 * 3)])
                        print("Result 2")
                        nn.show_class()
                        print()
                        print(
                            "******************************************************//******************************************************"
                        )
                        print()
# Example no. 2
# 0
def music_test():
    """Sweep MLP hyper-parameters on the music-tracks dataset (regression).

    For every combination of hidden-layer configuration, momentum, learning
    rate (eta), iteration cap and holdout fraction, trains an Mlp and prints
    its squared error plus the output for two sample test rows.
    """
    # Load and normalize the music data (all columns are features/targets;
    # no leading identifier column to skip).
    dataset = PreProcessing("default_features_1059_tracks.txt")
    dataset.normalize(ignore_first_column=False)

    # Hyper-parameters varied by the sweep.
    n_layers = [1, 2]
    hidden_layer = [20, [10, 10]]  # one layer of 20 neurons, or two of 10
    momentums = [0.3, 0.5, 0.7]
    max_iterations = [100, 250, 500]
    etas = [0.3, 0.5, 0.7]
    ps = [0.5, 0.7, 0.9]  # holdout train fractions

    # Exhaustive sweep over every combination.
    for layer in n_layers:
        for momentum in momentums:
            for eta in etas:
                for max_iteration in max_iterations:
                    for p in ps:
                        train, test = training.holdout(
                            p, dataset.normalized_dataframe)
                        example = test.values.tolist()
                        # BUG FIX: header previously printed int(10 / layer)
                        # (10 or 5 neurons) although hidden_layer actually
                        # configures 20 or 10 neurons per layer.
                        print("INPUT NEURONS = 68 HIDDEN NEURONS = " +
                              str(int(20 / layer)) +
                              " OUTPUT NEURONS = 2 HIDDEN LAYER = " +
                              str(layer) + " ETA = " + str(eta) +
                              " MAX ITERATIONS = " + str(max_iteration) +
                              " MOMENTUM = " + str(momentum) + " P = " +
                              str(p))
                        print()
                        nn = Mlp(68,
                                 hidden_layer[layer - 1],
                                 2,
                                 n_hidden_layers=layer)
                        # BUG FIX: momentum was swept (and printed above) but
                        # never forwarded to training, so every momentum
                        # iteration trained with the default value.
                        # NOTE(review): assumes backpropagation accepts a
                        # `momentum` keyword — confirm against Mlp.
                        nn.backpropagation(train.values.tolist(),
                                           eta=eta,
                                           momentum=momentum,
                                           max_iterations=max_iteration)
                        print("SQUARED ERROR =",
                              training.squared_error(nn, test, n_classes=2))
                        print()

                        # Show two concrete predictions; the last 2 columns
                        # hold the targets, so strip them before feeding
                        # the network.
                        print("Input 1")
                        nn.feed_forward(example[0][:(-1 * 2)])
                        print(example[0])
                        print("Result 1")
                        nn.show_class()
                        print()

                        print("Input 2")
                        print(example[15])
                        nn.feed_forward(example[15][:(-1 * 2)])
                        print("Result 2")
                        nn.show_class()
                        print()
                        print(
                            "******************************************************//******************************************************"
                        )
                        print()
# Example no. 3
# 0
def main():
    """Train a single 13-10-3 MLP on the wine dataset and spot-check it.

    Uses a fixed 0.7 holdout split, prints the network's predicted class
    for three individual test rows (pausing for keyboard input between
    steps), then prints the overall accuracy on the test set.
    """

    # Load and normalize the wine data; the class column is moved to the
    # end and normalized so it matches the network's output encoding.
    dataset = PreProcessing("wine_dataset.txt")
    dataset.normalize(ignore_first_column=True)
    dataset.switch_first_last_column()
    dataset.normalize_class()

    # 70% train / 30% test split.
    train, test = training.holdout(0.7, dataset.normalized_dataframe)

    # 13 input features, one hidden layer of 10 neurons, 3 output classes.
    nn = Mlp(13, 10, 3, n_hidden_layers=1)
    nn.backpropagation(train.values.tolist(), eta=0.5)

    example = test.values.tolist()
    print(len(example))
    input()  # pause so the user can inspect the output before continuing
    #print(example)
    #print(example[17])
    # Feed sample rows; the last 3 columns hold the class encoding, so
    # strip them before feeding the network.
    nn.feed_forward(example[0][:(-1 * 3)])
    print(example[0])
    nn.show_class()

    nn.feed_forward(example[40][:(-1 * 3)])
    print(example[40])
    print(test.iloc[[40]].values.tolist())
    input()  # pause again before showing the predicted class
    nn.show_class()

    nn.feed_forward(example[31][:(-1 * 3)])
    print(example[31])
    nn.show_class()

    print(training.accuracy(nn, test, n_classes=3))
    """