plt.grid(True)
    # NOTE(review): this is the tail of a bar-plotting helper whose `def`
    # line lies before this excerpt; `points` and `file_name` are presumably
    # its parameters — confirm against the full file.
    labels = map(str, range(2,11))
    # One bar per entry in `points`, offset by half a bar width.
    xlocations = na.array(range(len(points))) + .5
    width = .5
    plt.bar(xlocations, points, width=width)
    # Center each tick label under its bar.
    plt.xticks(xlocations + width/2, labels)
    plt.savefig(file_name)
    # Clear the current figure so the next plot starts clean.
    plt.clf()

# Train networks with 2..10 hidden neurons on each training set, log every
# configuration's weights and error count, and plot the best performer.
#
# Relies on names defined elsewhere in this file: training_sets,
# load_training_set, NeuralNetwork, training, test, get_random_set,
# plot, bar_plot, and itemgetter (from operator).
#
# The context manager guarantees the log file is closed even if a
# training run raises; parenthesized print works on Python 2 and 3.
with open('backpropagation.log', 'w') as log_file:
    for training_set, ts_name in training_sets:
        training_set = load_training_set(training_set)
        print('.. beginning %s' % ts_name)
        log_file.write('.. beginning %s\n' % ts_name)
        total_error_log = []
        # Sweep hidden-layer sizes 2..10 and record each network's results.
        for hs in range(2, 11):
            print('      %s hidden neurons' % hs)
            nn = NeuralNetwork(2, hs, 1)
            error_log = training(nn, training_set, max_iterations=2000)
            test_log, total_error = test(nn, get_random_set(10000))
            log_file.write('%s neurons: %s errors \nhidden:  %s\noutputs: %s\n\n' %
                           (hs, total_error,
                            [hn.weights for hn in nn.hidden],
                            [on.weights for on in nn.outputs]))
            total_error_log.append((hs, total_error, error_log, test_log))
        # Pick the configuration with the fewest test failures.
        best = min(total_error_log, key=itemgetter(1))
        print('   lowest error on %s hidden neurons network: %s failures' %
              (best[0], best[1]))
        plot(best[2], best[3],
             save='media/%s-%s-best-performance.png' % (ts_name, best[0]))
        # Materialize to a list so bar_plot can take len() under Python 3 too.
        bar_plot([entry[1] for entry in total_error_log],
                 'media/error-derivation-%s' % ts_name)
Example #2
0
if __name__ == "__main__":
    # Hyper-parameters for the iris multilabel-classification run.
    n_outputs = 3
    batch_size = 10
    pct_train = 80
    n_episodes = int(1e3)
    lr = 1e-2

    # Map each species label to its class index.
    label_codes = [("setosa", 0), ("versicolor", 1), ("virginica", 2)]
    features, targets = network.extract("iris.csv",
                                        encoding=label_codes,
                                        output=n_outputs,
                                        label="species")

    # Layer widths: input dimension, four hidden layers, output classes.
    layer_sizes = [len(features[0]), 10, 20, 20, 5, n_outputs]
    train_set, test_set, valid_set = network.partition(
        features, targets, n_outputs, batch_size, pct_train)

    # ReLU on every hidden layer; the last entry is "" — presumably the
    # cross-entropy cost supplies the output transform (confirm in
    # network.learn).
    activations = ["relu", "relu", "relu", "relu", ""]
    model, loss_history, acc_history = network.learn(
        train_set, layer_sizes, activations, lr, n_episodes,
        "crossentropy", "adam")

    network.plot(
        loss_history, "forestgreen",
        "accumulated_errors_over_each_epoch_multilabel_classification",
        "Episode", "Error")
    network.plot(
        acc_history, "mediumvioletred",
        "accuracy_over_each_epoch_multilabel_classification",
        "Episode", "Learning accuracy")
    network.test(model, test_set, n_outputs)
Example #3
0

# Train networks with 2..10 hidden neurons on each training set, log every
# configuration's weights and error count, and plot the best performer.
#
# Relies on names defined elsewhere in this file: training_sets,
# load_training_set, NeuralNetwork, training, test, get_random_set,
# plot, bar_plot, and itemgetter (from operator).
#
# The context manager guarantees the log file is closed even if a
# training run raises; parenthesized print works on Python 2 and 3.
with open('backpropagation.log', 'w') as log_file:
    for training_set, ts_name in training_sets:
        training_set = load_training_set(training_set)
        print('.. beginning %s' % ts_name)
        log_file.write('.. beginning %s\n' % ts_name)
        total_error_log = []
        # Sweep hidden-layer sizes 2..10 and record each network's results.
        for hs in range(2, 11):
            print('      %s hidden neurons' % hs)
            nn = NeuralNetwork(2, hs, 1)
            error_log = training(nn, training_set, max_iterations=2000)
            test_log, total_error = test(nn, get_random_set(10000))
            log_file.write('%s neurons: %s errors \nhidden:  %s\noutputs: %s\n\n' %
                           (hs, total_error,
                            [hn.weights for hn in nn.hidden],
                            [on.weights for on in nn.outputs]))
            total_error_log.append((hs, total_error, error_log, test_log))
        # Pick the configuration with the fewest test failures.
        best = min(total_error_log, key=itemgetter(1))
        print('   lowest error on %s hidden neurons network: %s failures' %
              (best[0], best[1]))
        plot(best[2], best[3],
             save='media/%s-%s-best-performance.png' % (ts_name, best[0]))
        # Materialize to a list so bar_plot can take len() under Python 3 too.
        bar_plot([entry[1] for entry in total_error_log],
                 'media/error-derivation-%s' % ts_name)