Example no. 1
import json
import sys

# NUM_BATCHES, SKIP and MAX_SAMPLES are module-level constants in the
# original script; NeuralNetwork, Dataset and parsing come from the
# surrounding project.
def main(argv):
    if len(argv) < 3:
        usage = 'Usage:  %s RESULT_FILE.json DATASET_FILE'
        print(usage % argv[0], file=sys.stderr)
        return 1

    result_file = argv[1]
    dataset_file = argv[2]

    with open(result_file, 'r') as file:
        result = json.load(file)

    network = NeuralNetwork(result['lambda'], result['structure'])
    dataset = Dataset(parsing.parse_dataset_file(dataset_file), normalize=True)
    batches = dataset.random_folds(NUM_BATCHES)

    counter = 1
    print('Iterations,J_t')

    # network.train yields the cost J_t once every SKIP iterations.
    for j_t in network.train(batches, **result['training'], skip=SKIP):
        print('%s,%s' % (counter * SKIP, j_t))
        counter += 1
        if counter >= MAX_SAMPLES:
            break
    else:
        # Runs only if training stopped before MAX_SAMPLES was reached:
        # record the final cost once more.
        print('%s,%s' % (counter * SKIP, j_t))
Example no. 2
import sys

# USAGE is a module-level constant and backpropagation a function
# defined elsewhere in the original module.
def main(argv, calculate_gradients=backpropagation):
    try:
        network_file, weights_file, dataset_file = argv[1:]
    except ValueError:
        print(USAGE % argv[0], file=sys.stderr)
        return 1

    network = NeuralNetwork(*parsing.parse_network_file(network_file))
    network.set_weights(parsing.parse_weights_file(weights_file))
    dataset = Dataset(parsing.parse_dataset_file(dataset_file))

    # Compute the gradients using the supplied function
    gradients = calculate_gradients(network, dataset)

    # Print the gradient matrices, one matrix (layer) per line:
    # - rows separated by semicolons
    # - elements separated by commas, with 5 decimal places
    for matrix in gradients:
        rows = [', '.join('%.5f' % val for val in row) for row in matrix]
        print('; '.join(rows))
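
The calculate_gradients parameter makes the gradient routine swappable, e.g. for gradient checking. A minimal usage sketch, assuming only the (network, dataset) -> gradients contract shown above (zero_gradients is a hypothetical stand-in, not part of the original code):

import sys

def zero_gradients(network, dataset):
    # Trivial stand-in honoring the same contract as backpropagation:
    # returns an (empty) list of gradient matrices.
    return []

if __name__ == '__main__':
    # Same CLI as the default entry point, gradient routine swapped out.
    sys.exit(main(sys.argv, calculate_gradients=zero_gradients))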
Example no. 3
import sys

# HIDDEN_LAYERS, LAMBDAS, NUM_FOLDS, PARAMS and RESULTS_DIR are
# module-level constants; cross_validation and save_results are
# defined elsewhere in the original module.
def main(argv):
    try:
        dataset_file = argv[1]
    except IndexError:
        print('Usage:  %s DATASET_FILE' % argv[0], file=sys.stderr)
        return 1

    instances = parsing.parse_dataset_file(dataset_file)
    dataset = Dataset(instances, normalize=True)

    # Build every candidate structure: input layer, hidden layers, output layer.
    structures = ([dataset.num_inputs()] + hidden + [dataset.num_outputs()]
                  for hidden in HIDDEN_LAYERS)

    for structure in structures:
        for lambda_ in LAMBDAS:
            network = NeuralNetwork(lambda_, structure)
            results = cross_validation(network, dataset, NUM_FOLDS, **PARAMS)
            save_results(RESULTS_DIR, results)
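
For illustration, with hypothetical values HIDDEN_LAYERS = [[4], [8, 4]] and a dataset with 3 inputs and 2 outputs, the generator expression above would yield:

# Hypothetical values, for illustration only.
HIDDEN_LAYERS = [[4], [8, 4]]
num_inputs, num_outputs = 3, 2

structures = ([num_inputs] + hidden + [num_outputs]
              for hidden in HIDDEN_LAYERS)

print(list(structures))  # [[3, 4, 2], [3, 8, 4, 2]]

Note that structures is a generator, so it can be consumed only once; that is fine here because the lambda_ loop is nested inside the structure loop.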
Example no. 4
import matplotlib.pyplot as plt
import numpy as np

# x, y, val_x, val_y, test_x, test_y, data_size, hidden_size,
# learn_rate, num_batchs and classes are globals in the original script.
def do_neural_network():
    NN = NeuralNetwork(x[0], y[0], hidden_size, learn_rate, 'relu')

    batch_size = 64
    epochs = 300

    NN.train(x,
             y,
             epochs=epochs,
             num_batchs=num_batchs,
             batch_size=batch_size,
             stop_cond=0.001)

    plt.plot(np.arange(len(NN.losses)), NN.losses)
    plt.savefig('loss.png')
    plt.show()

    # NN.eval returns (accuracy, predictions), as in the test-set call
    # below, so unpack here as well and keep only the accuracy.
    accur, _ = NN.eval(val_x, val_y)
    print('validation accur: ', accur)

    test_accur, predicts = NN.eval(test_x, test_y)
    print('test accur: ', test_accur)

    summary = ('data_size: {} '
               'hidden_size: {} '
               'learn_rate: {} '
               'batch_size: {} '
               'epochs: {} '
               'val accur: {} '
               'test accur: {} '
               'loss: {}\n'.format(data_size, hidden_size, learn_rate,
                                   batch_size, epochs, accur, test_accur,
                                   NN.losses[-1]))
    print('NN ' + summary)

    with open('results.txt', 'a') as f:
        f.write(summary)

    with open('nn_weights.txt', 'w+') as f:
        f.write('W1: {}\nb1: {}\nW2: {}\nb2: {}'.format(
            NN.W1, NN.b1, NN.W2, NN.b2))

    plot_confusion_matrix(test_y, predicts, classes)
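
plot_confusion_matrix is defined elsewhere in the original script. A minimal sketch of what such a helper might look like, assuming plain label vectors and sklearn (this is an illustration, not the original implementation):

import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix

def plot_confusion_matrix(y_true, y_pred, classes):
    # Sketch only: plot the confusion matrix as a heatmap with counts.
    cm = confusion_matrix(y_true, y_pred)
    fig, ax = plt.subplots()
    ax.imshow(cm, cmap='Blues')
    ax.set_xticks(range(len(classes)))
    ax.set_xticklabels(classes)
    ax.set_yticks(range(len(classes)))
    ax.set_yticklabels(classes)
    ax.set_xlabel('Predicted')
    ax.set_ylabel('True')
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, cm[i, j], ha='center', va='center')
    fig.savefig('confusion_matrix.png')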
Example no. 5
#!/usr/bin/env python3

import os
import sys
import timeit

# Make the trabalho2 package importable when running from this directory.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from trabalho2 import parsing
from trabalho2.dataset import Dataset
from trabalho2.neural_network import NeuralNetwork

path = 'tests/fixtures/exemplo2/%s.txt'

network = NeuralNetwork(*parsing.parse_network_file(path % 'network'))
network.set_weights(parsing.parse_weights_file(path % 'initial_weights'))
dataset = Dataset(parsing.parse_dataset_file(path % 'dataset'))

# Each result is the total time for the 10,000 calls in one repetition.
results = timeit.repeat(
    'network.gradients(dataset)',
    number=10000,
    globals=globals())

for r in results:
    print(r)
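
To turn those totals into a per-call figure, the usual convention is to take the minimum over repetitions (the least noisy run) and divide by number:

print('best: %.6f s/call' % (min(results) / 10000))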
Example no. 6
def network_variations(lambdas, structures):
    # Yield one network per (structure, lambda) combination.
    for structure in structures:
        for lambda_ in lambdas:
            yield NeuralNetwork(lambda_, structure)


def test_lambda_(example):
    lambda_, structure = example['network']
    network = NeuralNetwork(lambda_, structure)

    assert network.lambda_ == lambda_


def test_structure(example):
    lambda_, structure = example['network']
    network = NeuralNetwork(lambda_, structure)

    assert list(network.structure) == structure


def init_network(example):
    # Initialize a neural network with the values from the given example.
    network = NeuralNetwork(*example['network'])
    network.set_weights(example['weights'])
    return network
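
A short usage sketch for network_variations, with a hypothetical hyperparameter grid (lambda_ and structure are the attributes asserted on in the tests above):

# Hypothetical grid, for illustration only.
lambdas = [0.0, 0.25]
structures = [[3, 4, 2], [3, 8, 4, 2]]

for network in network_variations(lambdas, structures):
    print(network.lambda_, list(network.structure))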