Example #1
# weights for the regression labels
weights_reg = compute_class_weights(Class)

# weights for the segmentation labels
Class = np.loadtxt('/Tmp/cucurulg/datasets/cortical_layers/6layers_segmentation/training_cls.txt')
weights_seg = compute_class_weights(Class)
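
# compute_class_weights itself is not shown in this snippet (Example #2 below only
# contains its last two lines). A minimal sketch of one plausible implementation,
# assuming Class is a flat array of integer labels, numpy is imported as np, and the
# weights follow median frequency balancing (median(freq) / freq per class):
def compute_class_weights(Class):
    # count how often each integer label occurs
    counts = np.bincount(Class.astype('int64'))
    # relative frequency of each class
    freq = counts.astype(float) / Class.size
    # rare classes get weights > 1, frequent classes get weights < 1
    return np.median(freq) / freq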

# Define training functions

print "Defining and compiling training functions"

prediction_seg, prediction_reg = lasagne.layers.get_output([
                                                simple_net_output[0],
                                                simple_net_output[1]])

# Loss function
loss_reg = weighted_crossentropy(prediction_reg, target_var_reg, weight_vector_reg)
loss_reg = loss_reg.mean()

loss_seg = weighted_crossentropy(prediction_seg, target_var_seg, weight_vector_seg)
loss_seg = loss_seg.mean()

# joint loss: sum of the regression and segmentation terms
loss = loss_reg + loss_seg
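
# weighted_crossentropy is not defined in either snippet. A minimal sketch of what such
# a helper could look like, assuming the predictions are softmax outputs of shape
# (n_samples, n_classes), the targets are integer class indices, and the weight vector
# holds one weight per class (as produced by compute_class_weights); the helper actually
# used here may differ:
def weighted_crossentropy(prediction, target, weights):
    # per-sample cross-entropy, scaled by the weight of each sample's true class
    ce = lasagne.objectives.categorical_crossentropy(prediction, target)
    return ce * weights[target]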

# Add regularization
if weight_decay > 0:
    weightsl2 = regularize_network_params(
        simple_net_output, lasagne.regularization.l2)
    loss += weight_decay * weightsl2

# train accuracy
train_acc_reg = accuracy_regions(prediction_reg, target_var_reg)
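
# The first example is cut off here. Based on the second example below, it would
# presumably continue with the segmentation accuracy, the Adam updates and the
# compilation of the Theano training function. A hedged sketch of that continuation
# (theano is assumed to be imported; input_var, target_var_seg, target_var_reg,
# weight_vector_seg, weight_vector_reg and learn_step are inferred from the snippet
# and may not match the original code):
train_acc_seg = accuracy_regions(prediction_seg, target_var_seg)

params = lasagne.layers.get_all_params(simple_net_output, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=learn_step)

train_fn = theano.function(
    [input_var, target_var_seg, target_var_reg, weight_vector_seg, weight_vector_reg],
    [loss, train_acc_seg, train_acc_reg],
    updates=updates)
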
Example #2
    # tail of compute_class_weights: median frequency balancing over the class frequencies
    freq = counts.astype(float) / Class.size
    return np.median(freq) / freq

weights = compute_class_weights(Class)

# Define training functions

print "Defining and compiling training functions"

prediction = lasagne.layers.get_output(simple_net_output[0])

# only for DEBUG!!!!
# debug_prediction = theano.function([input_var], prediction)

# Loss function
loss = weighted_crossentropy(prediction, target_var, weight_vector)
loss = loss.mean()

# Add regularization
if weight_decay > 0:
    weightsl2 = regularize_network_params(
        simple_net_output, lasagne.regularization.l2)
    loss += weight_decay * weightsl2

# train accuracy
train_acc = accuracy_regions(prediction, target_var)
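
# accuracy_regions is not defined in either snippet. One plausible form, assuming the
# prediction is a softmax output and the target holds integer class indices (the real
# helper may additionally mask out void/ignore regions):
import theano.tensor as T

def accuracy_regions(prediction, target):
    # fraction of samples whose most probable class matches the target
    return T.mean(T.eq(T.argmax(prediction, axis=1), target))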

# Define the update function
params = lasagne.layers.get_all_params(simple_net_output, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=learn_step)
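
# The second example also stops before the training function is compiled. A minimal
# sketch of the usual next step, assuming theano is imported and input_var, target_var
# and weight_vector are the Theano variables feeding the network and the loss above
# (the actual compiled inputs/outputs may differ):
train_fn = theano.function(
    [input_var, target_var, weight_vector],
    [loss, train_acc],
    updates=updates)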