def train(fold, train_patient_indexes, val_patient_indexes):
    """Train a segmentation model for one cross-validation fold and evaluate it.

    Args:
        fold: Fold index; used to name the per-fold log directory ``fold_<n>/``.
        train_patient_indexes: Patient indexes whose slices form the training set.
        val_patient_indexes: Patient indexes whose slices form the validation set.

    Returns:
        dict mapping each score name produced by ``get_score_from_all_slices``
        to its mean value over all validation slices.

    NOTE(review): relies on module-level globals defined elsewhere in this file
    (``input_shape``, ``pretrained_weights_file``, ``data_file_path``,
    ``batch_size``, ``num_epoch``) and on the model/generator/score helpers
    imported at file scope.
    """
    log_dir = 'fold_' + str(fold) + '/'
    # makedirs(exist_ok=True) replaces the original isdir()+mkdir() pair,
    # which was race-prone between the check and the create.
    os.makedirs(log_dir, exist_ok=True)

    # Each patient volume contributes 189 axial slices — TODO confirm this
    # matches the dataset layout in the HDF5 file.
    num_slices_train = len(train_patient_indexes) * 189
    num_slices_val = len(val_patient_indexes) * 189

    # Create model (clear any previous graph/session first).
    K.clear_session()
    model = create_xception_unet_n(
        input_shape=input_shape,
        pretrained_weights_file=pretrained_weights_file)
    model.compile(optimizer=Adam(lr=1e-3), loss=get_loss, metrics=[dice])

    # Callbacks: checkpoint best weights, decay LR on plateau, stop early,
    # and log per-epoch metrics to CSV/TensorBoard.
    checkpoint = ModelCheckpoint(
        log_dir + 'ep={epoch:03d}-loss={loss:.3f}-val_loss={val_loss:.3f}.h5',
        verbose=1,
        monitor='val_loss',
        save_weights_only=True,
        save_best_only=True,
        period=1)
    reduce_lr = ReduceLROnPlateau(monitor='val_loss',
                                  factor=0.2,
                                  min_delta=1e-3,
                                  patience=3,
                                  verbose=1)
    early_stopping = EarlyStopping(monitor='val_loss',
                                   min_delta=0,
                                   patience=5,
                                   verbose=1)
    csv_logger = CSVLogger(log_dir + 'record.csv')
    tensorboard = TensorBoard(log_dir=log_dir)

    # Train the model. Validation batches are fixed at 9 slices each, so
    # validation_steps uses // 9 to cover the whole validation set.
    model.fit_generator(
        create_train_date_generator(patient_indexes=train_patient_indexes,
                                    h5_file_path=data_file_path,
                                    batch_size=batch_size),
        steps_per_epoch=max(1, num_slices_train // batch_size),
        validation_data=create_val_date_generator(
            patient_indexes=val_patient_indexes,
            h5_file_path=data_file_path,
            batch_size=9),
        validation_steps=max(1, num_slices_val // 9),
        epochs=num_epoch,
        initial_epoch=0,
        callbacks=[
            checkpoint, reduce_lr, early_stopping, tensorboard, csv_logger
        ])
    model.save_weights(log_dir + 'trained_final_weights.h5')

    # Save model itself
    model.save(os.path.join(log_dir, 'trained_final_model'))
    # model.save(os.path.join(log_dir, 'trained_final_model.h5'))

    # Evaluate the model slice-by-slice over the validation set.
    predicts = []
    labels = []
    f = create_val_date_generator(patient_indexes=val_patient_indexes,
                                  h5_file_path=data_file_path)
    for _ in range(num_slices_val):
        # next(f) is the idiomatic form of the original f.__next__().
        img, label = next(f)
        predicts.append(model.predict(img))
        labels.append(label)
    predicts = np.array(predicts)
    labels = np.array(labels)
    score_record = get_score_from_all_slices(labels=labels, predicts=predicts)

    # Save per-slice scores for offline analysis.
    df = pd.DataFrame(score_record)
    df.to_csv(os.path.join(log_dir, 'score_record.csv'), index=False)

    # Print and collect per-metric means; compute np.mean once per key
    # (the original computed it twice) and iterate items() directly.
    mean_score = {}
    for key, values in score_record.items():
        mean_value = np.mean(values)
        print('In fold ', fold, ', average', key, ' value is: \t ', mean_value)
        mean_score[key] = mean_value

    # Exit training: release the Keras/TF session before returning.
    K.clear_session()
    return mean_score
output_path_gt_final = os.path.join( output_dir, "".join( ["patient_", str(patient_index), "_gt_", str(num_slices_val - 1)])) if (os.path.isfile("".join([output_path_gt_final, ".npy"]))): print("".join([ "output_path_gt_final (", str("".join([output_path_gt_final, ".npy"])), ") already exists. Skipping patient ", str(patient_index) ]), flush=True) continue f = create_val_date_generator(patient_indexes=val_patient_indexes, h5_file_path=data_file_path) # img = nib.load(sample_input).get_fdata() # print("img shape") # print(np.shape(img))#(197, 233, 189) # sample_img = img[:,:,90] print("create_val_date_generator finished", flush=True) print("".join(["num_slices_val: ", str(num_slices_val)]), flush=True) for slice_index in np.arange(num_slices_val): print("".join([ "patient_index | slice_index: ", str(patient_index), "\t|\t", str(slice_index) ]), flush=True) output_path_seg = os.path.join( output_dir, "".join(