Example no. 1
0
def continue_bp(net, test_len, testing_name=""):
    """Continue training `net` with backpropagation for `test_len` iterations.

    Args:
        net: the network to train; must expose ``train(inputs, outputs)``.
        test_len: number of training iterations to run.
        testing_name: if non-empty, rows of ``[iteration, testing_error]``
            are written as CSV to ``./results/<testing_name>``.

    Returns:
        The same ``net`` object, trained in place.
    """
    # load training data (indexed by emotion)
    training_inputs = helpers.load_training_data()

    # writer for testing data — only created when a results file was requested
    csv_file = None
    writer = None
    if testing_name != "":
        path = "./results/" + testing_name
        # NOTE(review): 'wb+' is the Python 2 csv idiom; on Python 3 this
        # should be open(path, 'w', newline='') — confirm target version.
        csv_file = open(path, 'wb+')
        writer = csv.writer(csv_file, delimiter=',')

    try:
        # train the net
        for i in range(test_len):

            # Print out progress every 100 iterations:
            if i % 100 == 0:
                per = float(i) / float(test_len) * 100
                print("Training: " + str(format(per, '.2f')) + "%")

                # write intermediate testing results if testing
                if writer is not None:
                    data = [i, helpers.get_testing_error(net)]
                    writer.writerow(data)

            # randomly pick an emotion to train
            # (randrange(n) is the idiomatic form of choice(range(n)))
            emotion = random.randrange(len(cfg.emojis))

            # pick a specific test case for that emotion
            test_case = random.choice(training_inputs[emotion])

            net.train(test_case, cfg.outputs[emotion])

        # write the final testing result if testing
        if writer is not None:
            data = [test_len, helpers.get_testing_error(net)]
            writer.writerow(data)
    finally:
        # close the results file even if training raises (was leaked before)
        if csv_file is not None:
            csv_file.close()

    return net
Example no. 2
0
def genetic_train(pop_size, test_len, testing_name):
    """Train a population of nets with a genetic algorithm and return the best.

    Args:
        pop_size: number of nets in the population (assumes >= 2, since the
            two best nets are used as parents — TODO confirm with callers).
        test_len: maximum number of generations to evolve.
        testing_name: if non-empty, rows of ``[generation, testing_error]``
            are written as CSV to ``./results/<testing_name>``.

    Returns:
        The net with the smallest training error after evolution stops
        (error <= 0.1 or generation budget exhausted).
    """
    writer = None
    csv_file = None
    if testing_name != "":
        path = "./results/" + testing_name
        # NOTE(review): 'wb+' is the Python 2 csv idiom; on Python 3 this
        # should be open(path, 'w', newline='') — confirm target version.
        csv_file = open(path, 'wb+')
        writer = csv.writer(csv_file, delimiter=',')

    try:
        print("Training the net!")
        # Initializes the first population
        # (range instead of xrange: consistent with continue_bp, Py3-safe)
        print("Generation: 0")
        pop = [nn.init_net() for _ in range(pop_size)]

        training_data = helpers.load_training_data()

        # Calculate errors, then an index array sorted by error (ascending):
        # idx_err[0] is the index of the best net.
        errors = gen_err(pop, training_data)
        idx_err = sorted(range(len(errors)), key=lambda k: errors[k])

        print(" Smallest error: " + str(errors[idx_err[0]]))

        if writer is not None:
            data = [0, helpers.get_testing_error(pop[idx_err[0]])]
            writer.writerow(data)

        # Evolve generations until converged (error <= 0.1) or budget spent
        counter = 1
        while errors[idx_err[0]] > 0.1 and counter < test_len:

            print("Generation: " + str(counter))

            # The best two nets become parents; every other net is replaced
            # by a mutated recombination of the parents' weights.
            parent1 = pop[idx_err[0]]
            parent2 = pop[idx_err[1]]
            w1 = parent1.get_weights()
            w2 = parent2.get_weights()
            mut_cnt = int(cfg.MUT_RATE * len(w1))
            for x in idx_err[2:]:
                new_w = gen.recombine(w1, w2)
                new_w = gen.mutate(new_w, mut_cnt)
                pop[x].put_weights1d(new_w)

            # re-score and re-rank the new population
            errors = gen_err(pop, training_data)
            idx_err = sorted(range(len(errors)), key=lambda k: errors[k])
            print(" Smallest error: " + str(errors[idx_err[0]]))

            # write the current test error every 5 generations
            if writer is not None and counter % 5 == 0:
                data = [counter, helpers.get_testing_error(pop[idx_err[0]])]
                writer.writerow(data)
            counter += 1

        # write the last test error:
        if writer is not None:
            data = [counter, helpers.get_testing_error(pop[idx_err[0]])]
            writer.writerow(data)
    finally:
        # close the file even if evolution raises (was leaked before)
        if csv_file is not None:
            csv_file.close()

    best_net = pop[idx_err[0]]
    return best_net