Example #1
def test(net, P, T, filename, epochs, learning_rate, block_size):
    """Train net on the survival data (P, T) with gradient descent and the Cox
    error, logging the C-index before and after training."""
    logger.info("Running test for: " + filename + ' ' + str(epochs) + ", rate: " + str(learning_rate) + ", block_size: " + str(block_size))
    print("Number of patients with events: " + str(T[:, 1].sum()))
    print("Number of censored patients: " + str((1 - T[:, 1]).sum()))

    # C-index of the untrained network, for reference
    outputs = net.sim(P)
    c_index = get_C_index(T, outputs)
    logger.info("C index = " + str(c_index))

    try:
        #net = train_cox(net, (P, T), (None, None), timeslots, epochs, learning_rate = learning_rate)
        net = traingd(net, (P, T), (None, None), epochs, learning_rate, block_size, error_module = cox_error)
    except FloatingPointError:
        print('Aaawww....')  # training aborted by a numerical overflow
    # C-index after training
    outputs = net.sim(P)
    c_index = get_C_index(T, outputs)
    logger.info("C index = " + str(c_index))

    plot_network_weights(net)

    return net
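A minimal sketch of how test might be driven end to end, reusing parse_file and build_feedforward as they appear in Examples #3 and #4 below. The file name, column layout, hidden-layer size and training parameters are assumptions for illustration only.

# Hypothetical driver: columns 0-3 are inputs, column 4 the survival time,
# column 5 the event indicator (assumed layout).
filename = 'survival_data.txt'  # hypothetical file
P, T = parse_file(filename, targetcols = [4, 5], inputcols = [0, 1, 2, 3],
                  ignorecols = [], ignorerows = [], normalize = True)
net = build_feedforward(4, 10, 1)  # 4 inputs, 10 hidden nodes (arbitrary), 1 output
net = test(net, P, T, filename, epochs = 100, learning_rate = 0.1, block_size = 0)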
Example #2
def experiment(net, P, T, vP, vT, filename, epochs, learning_rate):
    """Like test above, but also evaluates the validation set (vP, vT) and
    draws Kaplan-Meier plots for the training and validation outputs."""
    logger.info("Running experiment for: " + filename + ' ' + str(epochs) + ", rate: " + str(learning_rate))
    print("Number of patients with events: " + str(T[:, 1].sum()))
    print("Number of censored patients: " + str((1 - T[:, 1]).sum()))

    timeslots = generate_timeslots(T)  # computed here but not passed to traingd below

    try:
        net = traingd(net, (P, T), (vP, vT), epochs, learning_rate, block_size = 100, error_module = cox_error)
    except FloatingPointError:
        print('Aaawww....')
    outputs = net.sim(P)
    c_index = get_C_index(T, outputs)
    logger.info("C index = " + str(c_index))

    #plot_network_weights(net)

    # Kaplan-Meier plot for the training set and, if one was given, the validation set
    kaplanmeier(time_array = T[:, 0], event_array = T[:, 1], output_array = outputs[:, 0])
    if vP is not None and len(vP) > 0:
        outputs = net.sim(vP)
        kaplanmeier(time_array = vT[:, 0], event_array = vT[:, 1], output_array = outputs[:, 0])

    return net
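experiment expects a separate validation set; below is a rough sketch of producing one by shuffling and splitting the parsed data with numpy. The 80/20 ratio is arbitrary, and P, T, net and filename are assumed to be defined as in the sketch after Example #1.

import numpy

rows = numpy.arange(len(P))
numpy.random.shuffle(rows)
cut = int(0.8 * len(rows))               # 80% training, 20% validation
trP, trT = P[rows[:cut]], T[rows[:cut]]
vP, vT = P[rows[cut:]], T[rows[cut:]]
net = experiment(net, trP, trT, vP, vT, filename, epochs = 100, learning_rate = 0.1)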
Example #3

def experiment(net, filename, epochs):
    """Load the data file, train net with plain gradient descent, and
    scatter-plot the network output against survival time."""
    P, T = parse_file(filename, targetcols = [4], inputcols = [0, 1, 2, 3], ignorecols = [], ignorerows = [], normalize = False)
    #P = P[:100,:]
    #T = T[:100, :]

    try:
        #net = train_cox(net, (P, T), (None, None), timeslots, epochs = 500, learning_rate = 5)
        net = traingd(net, (P, T), (None, None), epochs = epochs, learning_rate = 0.01, block_size = 0)
        #net = train_evolutionary(net, (P, T), (None, None), epochs = epochs)
    except FloatingPointError:
        print('Aaawww....')
    outputs = net.sim(P)

    plot_network_weights(net)

    plt.figure()
    plt.title('Scatter plot sum square error\n' + filename)
    plt.xlabel('Survival time years')
    plt.ylabel('Network output')
    try:
        plt.scatter(T.flatten(), outputs.flatten(), c = 'g', marker = 's')
        plt.plot(T.flatten(), T.flatten(), 'r-')  # reference line: output equals survival time
    except:
        pass  # plotting is best-effort; ignore any plotting errors
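As reproduced here the function builds the figure but never displays it; run on its own, it would need something along these lines afterwards:

plt.show()  # blocks until the figure window is closed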
Example #4
'''
Created on Jun 7, 2011

@author: jonask
'''
from kalderstam.neural.error_functions.sum_squares import total_error
from kalderstam.neural.network import build_feedforward
from kalderstam.util.filehandling import parse_data
from kalderstam.neural.training.gradientdescent import traingd
from kalderstam.neural.training.davis_genetic import train_evolutionary
import numpy

# XOR truth table: inputs in columns 0 and 1, target in column 2
xor_set = [[0, 0, 0],
           [0, 1, 1],
           [1, 0, 1],
           [1, 1, 0]]

xor_set = numpy.array(xor_set)

P, T = parse_data(xor_set, targetcols = 2, inputcols = [0, 1], normalize = False)

net = build_feedforward(2, 4, 1)  # 2 inputs, 4 hidden nodes, 1 output

print("Error before training: " + str(total_error(T, net.sim(P))))
net = traingd(net, (P, T), (None, None), epochs = 1000, learning_rate = 0.1, block_size = 0)
print("Error after training: " + str(total_error(T, net.sim(P))))

net = build_feedforward(2, 4, 1)  # fresh, untrained network for the genetic run
print("Error before genetic training: " + str(total_error(T, net.sim(P))))
net = train_evolutionary(net, (P, T), (None, None), epochs = 100, population_size = 100)
print("Error after genetic training: " + str(total_error(T, net.sim(P))))