Example #1
def main(argv):
    if len(argv) < 3:
        usage = 'Usage:  %s RESULT_FILE.json DATASET_FILE'
        print(usage % argv[0], file=sys.stderr)
        return 1

    result_file = argv[1]
    dataset_file = argv[2]

    with open(result_file) as file:
        result = json.load(file)

    network = NeuralNetwork(result['lambda'], result['structure'])
    dataset = Dataset(parsing.parse_dataset_file(dataset_file), normalize=True)
    batches = dataset.random_folds(NUM_BATCHES)

    counter = 1
    print('Iterations,J_t')

    for j_t in network.train(batches, **result['training'], skip=SKIP):
        print('%s,%s' % (counter * SKIP, j_t))
        counter += 1
        if counter >= MAX_SAMPLES:
            break
    else:
        # Generator exhausted before MAX_SAMPLES: emit the final cost too.
        print('%s,%s' % (counter * SKIP, j_t))
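Example #1 consumes NeuralNetwork.train as a generator that yields the current cost J_t once every SKIP iterations. The real method is not shown on this page; the sketch below only illustrates the assumed contract, and the helpers _gradient_descent_step and total_cost as well as the num_iterations keyword are invented names, not taken from the original project.

def train(self, batches, num_iterations=1000, skip=1):
    # Run mini-batch gradient descent, yielding the current cost J_t
    # once every `skip` iterations.
    for iteration in range(1, num_iterations + 1):
        for batch in batches:
            self._gradient_descent_step(batch)  # hypothetical helper
        if iteration % skip == 0:
            yield self.total_cost(batches)      # hypothetical helper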
Example #2
def test_join_datasets():
    d1 = Dataset([
        ([0.], [0.]),
        ([1.], [1.]),
    ])
    d2 = Dataset([
        ([2.], [2.]),
        ([3.], [3.]),
    ])
    combined = join_datasets([d1, d2])

    assert combined.features.tolist() == [[0.], [1.], [2.], [3.]]
    assert combined.expectations.tolist() == [[0.], [1.], [2.], [3.]]
Example #3
def test_join_datasets():
    d1 = Dataset([
        ([0.], [1.]),
        ([2.], [3.]),
    ])
    d2 = Dataset([
        ([4.], [5.]),
        ([6.], [7.]),
    ])
    combined = join_datasets([d1, d2])

    assert combined.features.tolist() == [[0.], [2.], [4.], [6.]]
    assert combined.labels.tolist() == [[1.], [3.], [5.], [7.]]
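Examples #2 and #3 test the same contract (the attribute holding the targets is called expectations in one revision and labels in the other): join_datasets concatenates the given datasets row-wise, preserving order. A minimal sketch, assuming Dataset accepts a list of (features, labels) pairs as in the tests above:

def join_datasets(datasets):
    # Concatenate the instances of all datasets, preserving their order.
    instances = []
    for dataset in datasets:
        instances.extend(zip(dataset.features.tolist(),
                             dataset.labels.tolist()))
    return Dataset(instances)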
Example #4
def main(argv):
    try:
        dataset_file = argv[1]
    except IndexError:
        print('Usage:  %s DATASET_FILE' % argv[0], file=sys.stderr)
        return 1

    instances = parsing.parse_dataset_file(dataset_file)
    dataset = Dataset(instances, normalize=True)

    # One structure per hidden-layer configuration:
    # input layer + hidden layers + output layer.
    structures = ([dataset.num_inputs()] + hidden + [dataset.num_outputs()]
                  for hidden in HIDDEN_LAYERS)

    for structure in structures:
        for lambda_ in LAMBDAS:
            network = NeuralNetwork(lambda_, structure)
            results = cross_validation(network, dataset, NUM_FOLDS, **PARAMS)
            save_results(RESULTS_DIR, results)
Example #5
def main(argv):
    try:
        dataset_file = argv[1]
    except IndexError:
        print('Usage:  %s DATASET_FILE' % argv[0], file=sys.stderr)
        return 1

    instances = parsing.parse_dataset_file(dataset_file)
    dataset = Dataset(instances, normalize=True)

    n_inputs = dataset.num_inputs()
    n_outputs = dataset.num_outputs()
    structures = ([n_inputs] + hidden_layers + [n_outputs]
                  for hidden_layers in hidden_layer_variations(
                      NUMS_HIDDEN_LAYERS, NUMS_NEURONS_PER_LAYER))

    for network in network_variations(LAMBDAS, structures):
        results = cross_validation(network, dataset, NUM_FOLDS, **PARAMS)
        save_results(RESULTS_DIR, results)
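Example #5 factors the grid search of Example #4 into two helpers. Their bodies are not shown on this page; a plausible sketch under the assumption that they simply enumerate the cartesian product of the grid (NeuralNetwork is the class used throughout these examples):

import itertools

def hidden_layer_variations(nums_hidden_layers, nums_neurons_per_layer):
    # One hidden-layer configuration per (depth, width) pair,
    # e.g. depth 2 and width 5 yield [5, 5].
    for depth, width in itertools.product(nums_hidden_layers,
                                          nums_neurons_per_layer):
        yield [width] * depth

def network_variations(lambdas, structures):
    # One freshly constructed network per (structure, lambda) pair.
    # Iterating structures in the outer loop matters: in Example #5 it
    # is a generator and can only be traversed once.
    for structure in structures:
        for lambda_ in lambdas:
            yield NeuralNetwork(lambda_, structure)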
Example #6
def test_random_folds():
    dataset = Dataset([
        ([0.], [1., 0.]),
        ([1.], [1., 0.]),
        ([2.], [1., 0.]),
        ([3.], [1., 0.]),
        ([4.], [0., 1.]),
        ([5.], [0., 1.]),
        ([6.], [0., 1.]),
        ([7.], [0., 1.]),
    ])
    folds = dataset.random_folds(4)

    assert len(folds) == 4
    for fold in folds:
        assert isinstance(fold, Dataset)
        assert len(fold) == 2

        # each fold must contain one instance of each class
        assert fold.expectations[0].tolist() != fold.expectations[1].tolist()
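The test demands stratified folds: equal sizes and one instance of every class per fold. A minimal sketch of such a method, assuming the class is given by the position of the 1 in the one-hot expectation vector (a guess at the real implementation, which is not shown here):

import random
from collections import defaultdict

def random_folds(self, k):
    # Group instance indices by class, shuffle each group, then deal the
    # instances round-robin into k folds so every class is spread evenly.
    by_class = defaultdict(list)
    for i, expectation in enumerate(self.expectations.tolist()):
        by_class[expectation.index(max(expectation))].append(i)

    folds = [[] for _ in range(k)]
    for indices in by_class.values():
        random.shuffle(indices)
        for j, i in enumerate(indices):
            folds[j % k].append((self.features[i].tolist(),
                                 self.expectations[i].tolist()))
    return [Dataset(instances) for instances in folds]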
Example #7
def test_normalize():
    features = [
        [0.0, -1.0],
        [1.0, 1.0],
        [4.0, 0.0],
    ]
    normalized_features = [
        [0.00, 0.0],
        [0.25, 1.0],
        [1.00, 0.5],
    ]
    instances = [(xs, [0.]) for xs in features]
    dataset = Dataset(instances, normalize=True)
    assert dataset.features.tolist() == normalized_features
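The expected values follow column-wise min-max scaling: each x in column j maps to (x - min_j) / (max_j - min_j), so in the first column (min 0, max 4) the value 1.0 becomes 0.25, and in the second (min -1, max 1) the value 0.0 becomes 0.5. A numpy sketch of that transformation (the Dataset constructor itself is not shown on this page):

import numpy as np

def min_max_normalize(features):
    # Rescale each column to [0, 1] using its own min and max.
    # Constant columns (max == min) would divide by zero and need
    # special handling, omitted here.
    features = np.asarray(features, dtype=float)
    col_min = features.min(axis=0)
    col_max = features.max(axis=0)
    return (features - col_min) / (col_max - col_min)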
Example #8
def main(argv, calculate_gradients=backpropagation):
    try:
        network_file, weights_file, dataset_file = argv[1:]
    except ValueError:
        print(USAGE % argv[0], file=sys.stderr)
        return 1

    network = NeuralNetwork(*parsing.parse_network_file(network_file))
    network.set_weights(parsing.parse_weights_file(weights_file))
    dataset = Dataset(parsing.parse_dataset_file(dataset_file))

    # Compute the gradients using the supplied function
    gradients = calculate_gradients(network, dataset)

    # Print the gradient matrices, one matrix (layer) per line:
    # - rows separated by semicolons
    # - elements separated by commas, with 5 decimal places
    for matrix in gradients:
        rows = [', '.join('%.5f' % val for val in row) for row in matrix]
        print('; '.join(rows))
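Because calculate_gradients is injectable, the same entry point can serve for gradient checking: pass a finite-difference estimator instead of backpropagation. A sketch of one, under the assumption that the weight matrices are reachable as numpy arrays through a hypothetical network.weights attribute and that a hypothetical network.total_cost(dataset) returns the cost J (neither name appears in these examples):

import numpy as np

EPSILON = 1e-6

def numerical_gradients(network, dataset):
    # Estimate every partial derivative with a central difference:
    # dJ/dw ~ (J(w + eps) - J(w - eps)) / (2 * eps).
    gradients = []
    for matrix in network.weights:                    # assumed attribute
        grad = np.zeros_like(matrix)
        for index in np.ndindex(matrix.shape):
            original = matrix[index]
            matrix[index] = original + EPSILON
            cost_plus = network.total_cost(dataset)   # assumed API
            matrix[index] = original - EPSILON
            cost_minus = network.total_cost(dataset)  # assumed API
            matrix[index] = original
            grad[index] = (cost_plus - cost_minus) / (2 * EPSILON)
        gradients.append(grad)
    return gradients

Called as main(sys.argv, calculate_gradients=numerical_gradients), it prints the estimates in the same format, ready for comparison against backpropagation.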
Example #9
#!/usr/bin/env python3

import os
import sys
import timeit

# Make the project root importable when running this script directly.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from trabalho2 import parsing
from trabalho2.dataset import Dataset
from trabalho2.neural_network import NeuralNetwork

path = 'tests/fixtures/exemplo2/%s.txt'

network = NeuralNetwork(*parsing.parse_network_file(path % 'network'))
network.set_weights(parsing.parse_weights_file(path % 'initial_weights'))
dataset = Dataset(parsing.parse_dataset_file(path % 'dataset'))

results = timeit.repeat(
    'network.gradients(dataset)',
    number=10000,
    globals=globals())

for r in results:
    print(r)
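timeit.repeat returns one total per repetition of the 10,000-call loop. The conventional summary is the minimum divided by number, since slower repetitions mostly reflect interference from other processes; for example:

print('best per-call time: %.6f s' % (min(results) / 10000))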
Example #10
def test_features(example):
    instances = example['dataset']
    dataset = Dataset(instances)
    assert dataset.features.tolist() == [i[0] for i in instances]
Example #11
def test_expectations(example):
    instances = example['dataset']
    dataset = Dataset(instances)
    assert dataset.expectations.tolist() == [i[1] for i in instances]
Example #12
def test_labels(example):
    instances = example['dataset']
    dataset = Dataset(instances)
    assert dataset.labels.tolist() == [i[1] for i in instances]
Example #13
def init_network_and_dataset(example):
    # Initialize a neural network and a dataset with the values from the given example.

    network = init_network(example)
    dataset = Dataset(example['dataset'])
    return network, dataset
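A hypothetical test built on this helper; the propagate method and the shape assertion are illustrative guesses, not part of the code shown here:

def test_propagation(example):
    network, dataset = init_network_and_dataset(example)
    outputs = network.propagate(dataset.features)  # assumed API
    assert outputs.shape == dataset.expectations.shape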