Mask_train = read_data(path+'Mask/', Mask_train, img_h, img_w)
    Mask_val =read_data(path+'Mask/', Mask_val, img_h, img_w)
    Img_train = read_data(path+'Image/', Img_train, img_h, img_w)
    Img_val = read_data(path+'Image/', Img_val, img_h, img_w)
    
    model = get_UNet(img_shape=(img_h, img_w, 1), Base=16, depth=4, inc_rate=2, 
                 activation='relu', drop=0, batchnorm=True, N=2, weight_use=False)

    model.compile(optimizer=Adam(lr=1e-5), loss=[dice_coef_loss], 
                  metrics=[dice_coef,precision,recall])

    History = model.fit(Img_train, Mask_train, batch_size=8, epochs=150, verbose=1, 
                        validation_data=(Img_val, Mask_val))

    plot_learning_curve(History, 'Task1_k={0}_loss_{1}_'.format(k,i+1))
    plot_validation_metric(History, 'Task1_k={0}_metrics_{1}_'.format(k,i+1))
    

''' Task 2 '''

# Number of cross-validation folds (alternative setting: k=5).
k = 3
Mask = split_list(Mask, k)
Img = split_list(Img, k)

radius = 2           # weight-map radius
weight_strength = 1  # weight-map strength
batch_size = 8

for i in range(k):
    # Fold i is held out for validation; every other fold is flattened into
    # the training list.
    # NOTE(review): the loop body appears truncated in this excerpt.
    Mask_val = list(Mask[i])
    Mask_train = [name for fold in Mask[:i] + Mask[i+1:] for name in fold]
# Derive integer labels from the parent-directory component of each path,
# then one-hot encode them.
y_train = [int(name.split(os.path.sep)[-2]) for name in train]
y_train = utils.to_categorical(y_train)

# Inverse-frequency class weights, normalised so the most frequent class has
# weight 1.0.  Keras' `class_weight` argument expects a dict mapping class
# index -> weight, not a bare array, so wrap it with enumerate().
counts = y_train.sum(axis=0)
class_weight = dict(enumerate(np.max(counts) / counts))

# Create and train the model
model = resnet_18(n_class=2)
epochs = 150
lr = 1e-5

model.compile(loss='categorical_crossentropy',
              optimizer=Adam(lr=lr),
              metrics=['accuracy'])

# NOTE(review): `Batch_size` (capitalised) is not defined in this snippet —
# only `batch_size` appears earlier; confirm the name exists at runtime.
History = model.fit_generator(
    train_gen,
    steps_per_epoch=train_gen.n // Batch_size,
    epochs=epochs,
    verbose=2,
    validation_data=val_gen,
    validation_steps=val_gen.n // Batch_size,
    class_weight=class_weight)

# Plot learning curves
plot_learning_curve(History, 'loss')
plot_validation_metric(History, 'metrics')

# Save the trained model
model.save('model.h5')
print("Model saved")

# ---- Beispiel #3 (score: 0) ----
from dataloader import gen_list, shuffle_split, read_data, read_data_onehot
from Unet import get_UNet_multi
from plot import plot_learning_curve, plot_validation_metric
from metrics import dice_coef_loss, dice_coef, precision, recall
# NOTE(review): `Adam` is used below but never imported in this snippet —
# confirm it is imported (keras / tf.keras optimizers) in the full file.

# Read the data
path = '/Lab1/Lab3/CT/'
img_h, img_w = 256, 256
Mask = gen_list(path, 'Mask')
Img = gen_list(path, 'Image')

# 80/20 train/validation split; masks are loaded one-hot encoded for the
# 3-class (N=3) segmentation task.  The original called `read_mask_onehot`,
# which is not the name imported above — fixed to `read_data_onehot`.
Mask_train, Mask_val, Img_train, Img_val = shuffle_split(Mask, Img, 0.8)
Mask_train = read_data_onehot(path + 'Mask/', Mask_train, img_h, img_w)
Mask_val = read_data_onehot(path + 'Mask/', Mask_val, img_h, img_w)
Img_train = read_data(path + 'Image/', Img_train, img_h, img_w)
Img_val = read_data(path + 'Image/', Img_val, img_h, img_w)

# Train the model (input shape follows img_h/img_w instead of hard-coded 256s)
model = get_UNet_multi(img_shape=(img_h, img_w, 1), Base=16, depth=4, inc_rate=2,
                       activation='relu', drop=0.5, batchnorm=True, N=3)

model.compile(optimizer=Adam(lr=0.0001), loss=[dice_coef_loss],
              metrics=[dice_coef, precision, recall])

History = model.fit(Img_train, Mask_train, batch_size=4, epochs=25, verbose=1,
                    validation_data=(Img_val, Mask_val))

# Plot the learning curves
plot_learning_curve(History, 'Task6_1')
plot_validation_metric(History, 'Task6_2')
# ---- Beispiel #4 (score: 0) ----
                    validation_data=(Img_validation, Mask_validation))
# NOTE(review): the call above is truncated in this excerpt — its opening
# `History = model.fit...(` line lies outside the visible snippet.

plot_learning_curve(History, 'Lab3_task5a2')
print('Task 5a2 done !')


# Task 5b1: retrain the model with binary cross-entropy loss.
BS = 8
EPOCHS = 150
seg_metrics = [dice_coef, precision, recall]
val_data = (Img_validation, Mask_validation)
steps = len(Img_train) // BS

model.compile(optimizer=Adam(lr=0.0001), loss='binary_crossentropy', metrics=seg_metrics)
History = model.fit_generator(aug.flow(Img_train, Mask_train, batch_size=BS),
                              validation_data=val_data,
                              steps_per_epoch=steps,
                              epochs=EPOCHS)

plot_learning_curve(History, 'Lab3_task5b1_learning_curve')
plot_validation_metric(History, 'Lab3_task5b1_metrics')
print('Task 5b1 done !')


# Task 5b2: identical setup, but with the Dice-coefficient loss.
model.compile(optimizer=Adam(lr=0.0001), loss=[dice_coef_loss], metrics=seg_metrics)
History = model.fit_generator(aug.flow(Img_train, Mask_train, batch_size=BS),
                              validation_data=val_data,
                              steps_per_epoch=steps,
                              epochs=EPOCHS)

plot_learning_curve(History, 'Lab3_task5b2_learning_curve')
plot_validation_metric(History, 'Lab3_task5b2_metrics')
print('Task 5b2 done !')