def test(net, P, T, vP, vT, filename, epochs, mutation_rate = 0.05, population_size = 50):
    """Run a genetic-algorithm training pass on *net* and log C-indices.

    Logs the C-index before training, after training (inside the try),
    and then the final training/validation C-indices.  A FloatingPointError
    raised during evolution is swallowed so the partially-trained net can
    still be evaluated and returned.
    """
    logger.info(f"Running genetic test for: {filename} {epochs}")
    print(f"Number of patients with events: {T[:, 1].sum()}")
    print(f"Number of censored patients: {(1 - T[:, 1]).sum()}")

    # Baseline concordance before any training.
    logger.info("C index = " + str(get_C_index(T, net.sim(P))))

    try:
        net = train_evolutionary(net, (P, T), (vP, vT), epochs,
                                 error_function = c_index_error,
                                 population_size = population_size,
                                 mutation_chance = mutation_rate)
        logger.info("C index = " + str(get_C_index(T, net.sim(P))))
    except FloatingPointError:
        # Best-effort: keep whatever net we have and report below.
        print('Aaawww....')

    logger.info("C index test = " + str(get_C_index(T, net.sim(P))))
    logger.info("C index vald = " + str(get_C_index(vT, net.sim(vP))))

    # Only plot when matplotlib was importable (module-level guard).
    if plt:
        plot_network_weights(net)

    return net
def test(net, P, T, vP, vT, filename, epochs, mutation_rate = 0.05, population_size = 50):
    """Train *net* with a genetic algorithm and log train/validation C-indices.

    Prints event/censoring counts for the training set and (if present) the
    validation set, trains via ``train_evolutionary``, and logs the C-index on
    the training data and — when a validation set exists — on the validation
    data.  A ``FloatingPointError`` during evolution is swallowed so the
    partially-trained net can still be evaluated and returned.

    Returns the (possibly trained) network.
    """
    logger.info("Running genetic test for: " + filename + ' ' + str(epochs))
    print("\nTraining set:")
    print("Number of patients with events: " + str(T[:, 1].sum()))
    print("Number of censored patients: " + str((1 - T[:, 1]).sum()))
    print("\nValidation set:")
    if vP is not None and len(vP) > 0:
        print("Number of patients with events: " + str(vT[:, 1].sum()))
        print("Number of censored patients: " + str((1 - vT[:, 1]).sum()))
    else:
        print("Empty")

    # Baseline concordance before any training.
    outputs = net.sim(P)
    c_index = get_C_index(T, outputs)
    logger.info("C index test = " + str(c_index))

    try:
        net = train_evolutionary(net, (P, T), (vP, vT), epochs, error_function = c_index_error, population_size = population_size, mutation_chance = mutation_rate)
        # NOTE: the original recomputed net.sim(P) here into a dead store;
        # the result is unconditionally recomputed below, so it was removed.
    except FloatingPointError:
        # Best-effort: keep whatever net we have and report below.
        print('Aaawww....')
    outputs = net.sim(P)
    c_index = get_C_index(T, outputs)
    logger.info("C index test = " + str(c_index))

    if vP is not None and len(vP) > 0:
        outputs = net.sim(vP)
        c_index = get_C_index(vT, outputs)
        logger.info("C index vald = " + str(c_index))

    return net
# Exemple #3
# 0
'''
Created on Jun 7, 2011

@author: jonask
'''
from kalderstam.neural.error_functions.sum_squares import total_error
from kalderstam.neural.network import build_feedforward
from kalderstam.util.filehandling import parse_data
from kalderstam.neural.training.gradientdescent import traingd
from kalderstam.neural.training.davis_genetic import train_evolutionary
import numpy

# XOR truth table: columns are [input_a, input_b, expected_output].
xor_set = numpy.array([[0, 0, 0],
                       [0, 1, 1],
                       [1, 0, 1],
                       [1, 1, 0]])

P, T = parse_data(xor_set, targetcols = 2, inputcols = [0, 1], normalize = False)

# --- Gradient-descent training on a fresh 2-4-1 network ---
net = build_feedforward(2, 4, 1)

print("Error before training: " + str(total_error(T, net.sim(P))))
net = traingd(net, (P, T), (None, None), epochs = 1000, learning_rate = 0.1, block_size = 0)
print("Error after training: " + str(total_error(T, net.sim(P))))

# --- Genetic training on another fresh 2-4-1 network ---
net = build_feedforward(2, 4, 1)
print("Error before genetic training: " + str(total_error(T, net.sim(P))))
net = train_evolutionary(net, (P, T), (None, None), epochs = 100, population_size = 100)
print("Error after genetic training: " + str(total_error(T, net.sim(P))))