from plot import plot_learning_curve
from metrics import dice_coef_loss, dice_coef
from keras.optimizers import Adam

# Read the data
# (gen_list, shuffle_split, read_data and get_UNet are assumed to be imported
# from the local helper modules)
path = '/Lab1/Lab3/X_ray/'
img_h, img_w = 256, 256
Mask = gen_list(path, 'Mask')
Img = gen_list(path, 'Image')

# 80/20 split into training and validation sets
Mask_train, Mask_val, Img_train, Img_val = shuffle_split(Mask, Img, 0.8)
Mask_train = read_data(path + 'Mask/', Mask_train, img_h, img_w)
Mask_val = read_data(path + 'Mask/', Mask_val, img_h, img_w)
Img_train = read_data(path + 'Image/', Img_train, img_h, img_w)
Img_val = read_data(path + 'Image/', Img_val, img_h, img_w)

# Train the model with binary cross-entropy loss
model = get_UNet(img_shape=(256, 256, 1), Base=16, depth=4, inc_rate=2,
                 activation='relu', drop=0.5, batchnorm=True)
model.compile(optimizer=Adam(lr=0.0001), loss='binary_crossentropy',
              metrics=[dice_coef])
History = model.fit(Img_train, Mask_train, batch_size=8, epochs=150,
                    verbose=2, validation_data=(Img_val, Mask_val))

# Plot the learning curve
plot_learning_curve(History, 'Task1a')

# Retrain the same architecture, now with the Dice loss
model = get_UNet(img_shape=(256, 256, 1), Base=16, depth=4, inc_rate=2,
                 activation='relu', drop=0.5, batchnorm=True)
model.compile(optimizer=Adam(lr=0.0001), loss=[dice_coef_loss],
              metrics=[dice_coef])
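# For reference, the metrics module itself is not shown here; below is a
# minimal sketch of a typical Keras implementation of dice_coef and
# dice_coef_loss. The smoothing term `smooth` is an assumption, added to
# avoid division by zero on empty masks.
from keras import backend as K

def dice_coef(y_true, y_pred, smooth=1.0):
    # Dice = 2 * |intersection| / (|y_true| + |y_pred|)
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2.0 * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)

def dice_coef_loss(y_true, y_pred):
    # Minimizing 1 - Dice maximizes overlap between prediction and mask
    return 1.0 - dice_coef(y_true, y_pred)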
from keras.optimizers import SGD
from keras.callbacks import ModelCheckpoint

# Get the patches of the training images
patches_imgs_train, patches_masks_train = prepare_training_data(
    imgs_train, truth_train, patch_h, patch_w, N_patches)

# Parameters for training the model
N_epochs = 150
batch_size = 64
lr = 0.1
decay_rate = lr / N_epochs
sgd = SGD(lr=lr, momentum=0.8, decay=decay_rate, nesterov=False)

model = get_UNet(img_shape=(patch_h, patch_w, 1), Base=32, depth=4, inc_rate=2,
                 activation='relu', drop=0.2, batchnorm=True, N=2)
# Pass the configured SGD instance; the string 'sgd' would silently use
# Keras' default SGD and ignore the learning-rate schedule defined above.
model.compile(optimizer=sgd, loss='categorical_crossentropy',
              metrics=['accuracy'])

# Save the architecture of the model
json_string = model.to_json()
with open('model_architecture.json', 'w') as f:
    f.write(json_string)

# Save the best weights (checkpoint on the validation loss)
checkpointer = ModelCheckpoint(filepath='best_weights.h5', monitor='val_loss',
                               save_best_only=True)
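# prepare_training_data is defined elsewhere; the sketch below shows one
# plausible implementation that samples N_patches sub-images at uniformly
# random positions. The sampling strategy is an assumption, and the arrays
# are assumed to have shape (n_images, H, W, 1).
import numpy as np

def prepare_training_data(imgs, truth, patch_h, patch_w, n_patches):
    n_imgs, full_h, full_w = imgs.shape[0], imgs.shape[1], imgs.shape[2]
    patches_img = np.empty((n_patches, patch_h, patch_w, 1), dtype=imgs.dtype)
    patches_mask = np.empty((n_patches, patch_h, patch_w, 1), dtype=truth.dtype)
    for n in range(n_patches):
        i = np.random.randint(n_imgs)                # pick a random image
        y = np.random.randint(full_h - patch_h + 1)  # random top-left corner
        x = np.random.randint(full_w - patch_w + 1)
        patches_img[n] = imgs[i, y:y + patch_h, x:x + patch_w]
        patches_mask[n] = truth[i, y:y + patch_h, x:x + patch_w]
    return patches_img, patches_mask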
from itertools import chain

k = 3  # k = 5
Mask = split_list(Mask, k)
Img = split_list(Img, k)
for i in range(k):
    # Fold i is held out for validation; the remaining k-1 folds form the
    # training set
    Mask_val = list(Mask[i])
    Mask_train = list(chain.from_iterable(Mask[:i] + Mask[i+1:]))
    Img_val = list(Img[i])
    Img_train = list(chain.from_iterable(Img[:i] + Img[i+1:]))
    Mask_train = read_data(path + 'Mask/', Mask_train, img_h, img_w)
    Mask_val = read_data(path + 'Mask/', Mask_val, img_h, img_w)
    Img_train = read_data(path + 'Image/', Img_train, img_h, img_w)
    Img_val = read_data(path + 'Image/', Img_val, img_h, img_w)

    model = get_UNet(img_shape=(img_h, img_w, 1), Base=16, depth=4, inc_rate=2,
                     activation='relu', drop=0, batchnorm=True, N=2,
                     weight_use=False)
    model.compile(optimizer=Adam(lr=1e-5), loss=[dice_coef_loss],
                  metrics=[dice_coef, precision, recall])
    History = model.fit(Img_train, Mask_train, batch_size=8, epochs=150,
                        verbose=1, validation_data=(Img_val, Mask_val))
    plot_learning_curve(History, 'Task1_k={0}_loss_{1}_'.format(k, i + 1))
    plot_validation_metric(History, 'Task1_k={0}_metrics_{1}_'.format(k, i + 1))

''' Task 2 '''
k = 3  # k = 5
Mask = split_list(Mask, k)
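# split_list is defined elsewhere; a minimal sketch is given below, assuming
# it simply chops the shuffled file list into k roughly equal folds (the
# exact chunking rule is an assumption).
def split_list(items, k):
    fold_size = len(items) // k
    folds = [items[j * fold_size:(j + 1) * fold_size] for j in range(k - 1)]
    folds.append(items[(k - 1) * fold_size:])  # last fold absorbs the remainder
    return folds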
import numpy as np
from keras.preprocessing.image import ImageDataGenerator

# Augmentation parameters, shared between the image and mask generators so
# that both receive identical random transforms
data_gen_args = dict(rotation_range=5,
                     width_shift_range=0.1,
                     height_shift_range=0.1,
                     validation_split=0.2)
image_train_datagen = ImageDataGenerator(**data_gen_args)
mask_train_datagen = ImageDataGenerator(**data_gen_args)
image_val_datagen = ImageDataGenerator(**data_gen_args)
mask_val_datagen = ImageDataGenerator(**data_gen_args)
seed = 1
batch_size = 8

model = get_UNet(img_shape=(img_h, img_w, 1), Base=16, depth=4, inc_rate=2,
                 activation='relu', drop=0, batchnorm=True, N=2)
model.compile(optimizer=Adam(lr=0.0001), loss=[dice_coef_loss],
              metrics=[dice_coef, recall, precision])

# Train the model on augmented batches
History = model.fit_generator(
    XYaugmentGenerator(image_train_datagen, mask_train_datagen,
                       Img_train, Mask_train, seed, batch_size),
    steps_per_epoch=np.ceil(float(len(Img_train)) / float(batch_size)),
    validation_data=XYaugmentGenerator(image_val_datagen, mask_val_datagen,
                                       Img_val, Mask_val, seed, batch_size),
    validation_steps=np.ceil(float(len(Img_val)) / float(batch_size)),
    epochs=150)
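# XYaugmentGenerator is defined elsewhere; the sketch below shows the usual
# paired-generator pattern it presumably follows: the image and mask
# generators are seeded identically, so each random transform applied to an
# image is also applied to its mask.
def XYaugmentGenerator(image_datagen, mask_datagen, X, Y, seed, batch_size):
    image_gen = image_datagen.flow(X, seed=seed, batch_size=batch_size)
    mask_gen = mask_datagen.flow(Y, seed=seed, batch_size=batch_size)
    while True:
        # Yield matching (image batch, mask batch) pairs indefinitely
        yield next(image_gen), next(mask_gen)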