def train(DATASET="DRIVE", crop_size=64, need_au=True, ACTIVATION='ReLU', dropout=0.2, batch_size=20, repeat=5, minimum_kernel=32, epochs=50): print('-'*40) print('Loading and preprocessing train data...') print('-'*40) network_name = "Unet" model_name = f"{network_name}_cropsize_{crop_size}_epochs_{epochs}" print("Model : %s" % model_name) prepare_dataset.prepareDataset(DATASET) activation = globals()[ACTIVATION] print('-' * 35) print('Creating and compiling model...') print('-' * 35) model = get_unet(minimum_kernel=minimum_kernel, do=dropout, size=crop_size, activation=activation) try: os.makedirs(f"./trained_model/{model_name}/", exist_ok=True) os.makedirs(f"./logs/{model_name}/", exist_ok=True) except: pass plot_model(model, to_file = './trained_model/'+ model_name + '/'+ model_name + '_model.png') #check how the model looks like json_string = model.to_json() with open('./trained_model/'+ model_name + '/'+ model_name + '_architecture.json', 'w') as jsonfile: jsonfile.write(json_string) now = datetime.now() # current date and time date_time = now.strftime("%Y-%m-%d---%H-%M-%S") tensorboard = TensorBoard( log_dir=f"./logs/{model_name}/{model_name}---{date_time}", histogram_freq=0, batch_size=32, write_graph=True, write_grads=True, write_images=True, embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None, embeddings_data=None, update_freq='epoch') save_path = f"./trained_model/{model_name}/{model_name}.hdf5" checkpoint = ModelCheckpoint(save_path, monitor='loss', verbose=1, save_best_only=True, mode='min') print('-'*30) print('Fitting model...') print('-'*30) data_generator = Generator(batch_size, repeat, DATASET) history = model.fit_generator(data_generator.gen(au=need_au, crop_size=crop_size), epochs=epochs, verbose=1, steps_per_epoch=1000 * data_generator.n // batch_size, use_multiprocessing=False, workers=0, callbacks=[tensorboard, checkpoint]) print('-'*30) print('Training finished') print('-'*30)
def predict(ACTIVATION='ReLU', dropout=0.2, minimum_kernel=32, epochs=50, crop_size=64,
            stride_size=3, DATASET='DRIVE'):
    print('-' * 40)
    print('Loading and preprocessing test data...')
    print('-' * 40)

    network_name = "Res-unet"
    model_name = f"{network_name}_cropsize_{crop_size}_epochs_{epochs}"

    prepare_dataset.prepareDataset(DATASET)
    test_data = [prepare_dataset.getTestData(0, DATASET),
                 prepare_dataset.getTestData(1, DATASET),
                 prepare_dataset.getTestData(2, DATASET)]
    IMAGE_SIZE = None
    if DATASET == 'DRIVE':
        IMAGE_SIZE = (565, 584)

    gt_list_out = {}
    pred_list_out = {}
    try:
        os.makedirs(f"./output/{model_name}/crop_size_{crop_size}/out/", exist_ok=True)
        gt_list_out.update({f"out": []})
        pred_list_out.update({f"out": []})
    except:
        pass

    print('-' * 30)
    print('Loading saved weights...')
    print('-' * 30)
    activation = globals()[ACTIVATION]
    model = get_res_unet(minimum_kernel=minimum_kernel, do=dropout, size=crop_size, activation=activation)
    print("Model : %s" % model_name)
    load_path = f"./trained_model/{model_name}/{model_name}.hdf5"
    model.load_weights(load_path, by_name=False)

    imgs = test_data[0]
    segs = test_data[1]
    masks = test_data[2]

    print('-' * 30)
    print('Predicting masks on test data...')
    print('-' * 30)
    print('\n')

    for i in tqdm(range(len(imgs))):
        img = imgs[i]    # (576, 576, 3)
        seg = segs[i]    # (576, 576, 1)
        mask = masks[i]  # (584, 565, 1)

        # Split the image into overlapping patches, predict each patch, and
        # stitch the patch predictions back into a full-size probability map
        patches_pred, new_height, new_width, adjustImg = crop_prediction.get_test_patches(img, crop_size, stride_size)
        pred = model.predict(patches_pred)
        pred_patches = crop_prediction.pred_to_patches(pred, crop_size, stride_size)
        pred_imgs = crop_prediction.recompone_overlap(pred_patches, crop_size, stride_size, new_height, new_width)
        pred_imgs = pred_imgs[:, 0:prepare_dataset.DESIRED_DATA_SHAPE[0], 0:prepare_dataset.DESIRED_DATA_SHAPE[0], :]
        probResult = pred_imgs[0, :, :, 0]  # (576, 576)
        pred_ = probResult
        with open(f"./output/{model_name}/crop_size_{crop_size}/out/{i + 1:02}.pickle", 'wb') as handle:
            pickle.dump(pred_, handle, protocol=pickle.HIGHEST_PROTOCOL)

        # Resize prediction, ground truth and FOV mask back to the original image size
        pred_ = resize(pred_, IMAGE_SIZE[::-1])  # (584, 565)
        mask_ = mask
        mask_ = resize(mask_, IMAGE_SIZE[::-1])  # (584, 565)
        seg_ = seg
        seg_ = resize(seg_, IMAGE_SIZE[::-1])  # (584, 565)
        gt_ = (seg_ > 0.5).astype(int)

        gt_flat = []
        pred_flat = []
        for p in range(pred_.shape[0]):
            for q in range(pred_.shape[1]):
                if mask_[p, q] > 0.5:  # Inside the mask pixels only
                    gt_flat.append(gt_[p, q])
                    pred_flat.append(pred_[p, q])
        gt_list_out[f"out"] += gt_flat
        pred_list_out[f"out"] += pred_flat

        # Rescale to [0, 255] and save the probability map as an image
        pred_ = 255. * (pred_ - np.min(pred_)) / (np.max(pred_) - np.min(pred_))
        cv2.imwrite(f"./output/{model_name}/crop_size_{crop_size}/out/{i + 1:02}.png", pred_)

    print('-' * 30)
    print('Prediction finished')
    print('-' * 30)
    print('\n')

    print('-' * 30)
    print('Evaluate the results')
    print('-' * 30)
    evaluate(gt_list_out[f"out"], pred_list_out[f"out"], epochs, crop_size, DATASET, network_name)

    print('-' * 30)
    print('Evaluate finished')
    print('-' * 30)
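# The nested per-pixel loops in predict() above gather ground-truth and prediction
# values only for pixels inside the field-of-view mask. Below is a minimal sketch
# of the same selection done with NumPy boolean indexing; flatten_inside_mask is a
# hypothetical helper added for illustration and is not called by the original code.
def flatten_inside_mask(gt_, pred_, mask_=None):
    """Return (gt_flat, pred_flat) restricted to pixels where mask_ > 0.5."""
    gt_ = np.squeeze(gt_)
    pred_ = np.squeeze(pred_)
    keep = np.ones(pred_.shape, dtype=bool) if mask_ is None else (np.squeeze(mask_) > 0.5)
    return gt_[keep].tolist(), pred_[keep].tolist()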
def train(iteration=3, DATASET='ALL', crop_size=128, need_au=True, ACTIVATION='ReLU', dropout=0.1,
          batch_size=32, repeat=4, minimum_kernel=32, epochs=200):
    model_name = f"Final_Emer_Iteration_{iteration}_cropsize_{crop_size}_epochs_{epochs}"
    print("Model : %s" % model_name)

    prepare_dataset.prepareDataset(DATASET)
    activation = globals()[ACTIVATION]
    model = define_model.get_unet(minimum_kernel=minimum_kernel, do=dropout, activation=activation, iteration=iteration)

    try:
        os.makedirs(f"trained_model/{DATASET}/", exist_ok=True)
        os.makedirs(f"logs/{DATASET}/", exist_ok=True)
    except:
        pass

    # Resume from previously saved best weights if they exist
    load_path = f"trained_model/{DATASET}/{model_name}_weights.best.hdf5"
    try:
        model.load_weights(load_path, by_name=True)
    except:
        pass

    now = datetime.now()  # current date and time
    date_time = now.strftime("%Y-%m-%d---%H-%M-%S")
    tensorboard = TensorBoard(
        log_dir=f"logs/{DATASET}/Final_Emer-Iteration_{iteration}-Cropsize_{crop_size}-Epochs_{epochs}---{date_time}",
        histogram_freq=0,
        batch_size=32,
        write_graph=True,
        write_grads=True,
        write_images=True,
        embeddings_freq=0,
        embeddings_layer_names=None,
        embeddings_metadata=None,
        embeddings_data=None,
        update_freq='epoch')

    save_path = f"trained_model/{DATASET}/{model_name}.hdf5"
    checkpoint = ModelCheckpoint(save_path, monitor='seg_final_out_loss', verbose=1, save_best_only=True, mode='min')

    data_generator = define_model.Generator(batch_size, repeat, DATASET)
    history = model.fit_generator(data_generator.gen(au=need_au, crop_size=crop_size, iteration=iteration),
                                  epochs=epochs,
                                  verbose=1,
                                  steps_per_epoch=100 * data_generator.n // batch_size,
                                  use_multiprocessing=True,
                                  workers=8,
                                  callbacks=[tensorboard, checkpoint])
def predict(ACTIVATION='ReLU', dropout=0.1, batch_size=32, repeat=4, minimum_kernel=32, epochs=200,
            iteration=3, crop_size=128, stride_size=3, DATASET='DRIVE'):
    prepare_dataset.prepareDataset(DATASET)
    test_data = [prepare_dataset.getTestData(0, DATASET),
                 prepare_dataset.getTestData(1, DATASET),
                 prepare_dataset.getTestData(2, DATASET)]
    IMAGE_SIZE = None
    if DATASET == 'DRIVE':
        IMAGE_SIZE = (565, 584)
    elif DATASET == 'CHASEDB1':
        IMAGE_SIZE = (999, 960)
    elif DATASET == 'STARE':
        IMAGE_SIZE = (700, 605)

    gt_list_out = {}
    pred_list_out = {}
    for out_id in range(iteration + 1):
        try:
            os.makedirs(f"./output/{DATASET}/crop_size_{crop_size}/out{out_id + 1}/", exist_ok=True)
            gt_list_out.update({f"out{out_id + 1}": []})
            pred_list_out.update({f"out{out_id + 1}": []})
        except:
            pass

    activation = globals()[ACTIVATION]
    model = define_model.get_unet(minimum_kernel=minimum_kernel, do=dropout, activation=activation, iteration=iteration)
    model_name = f"Final_Emer_Iteration_{iteration}_cropsize_{crop_size}_epochs_{epochs}"
    print("Model : %s" % model_name)
    load_path = f"trained_model/{DATASET}/{model_name}.hdf5"
    model.load_weights(load_path, by_name=False)

    imgs = test_data[0]
    segs = test_data[1]
    masks = test_data[2]

    for i in tqdm(range(len(imgs))):
        img = imgs[i]
        seg = segs[i]
        if masks:
            mask = masks[i]

        # The model produces one output per iteration; reconstruct a full-size
        # probability map from the overlapping patch predictions of each output
        patches_pred, new_height, new_width, adjustImg = crop_prediction.get_test_patches(img, crop_size, stride_size)
        preds = model.predict(patches_pred)

        out_id = 0
        for pred in preds:
            pred_patches = crop_prediction.pred_to_patches(pred, crop_size, stride_size)
            pred_imgs = crop_prediction.recompone_overlap(pred_patches, crop_size, stride_size, new_height, new_width)
            pred_imgs = pred_imgs[:, 0:prepare_dataset.DESIRED_DATA_SHAPE[0], 0:prepare_dataset.DESIRED_DATA_SHAPE[0], :]
            probResult = pred_imgs[0, :, :, 0]
            pred_ = probResult
            with open(f"./output/{DATASET}/crop_size_{crop_size}/out{out_id + 1}/{i + 1:02}.pickle", 'wb') as handle:
                pickle.dump(pred_, handle, protocol=pickle.HIGHEST_PROTOCOL)

            # Resize prediction, ground truth and FOV mask back to the original image size
            pred_ = resize(pred_, IMAGE_SIZE[::-1])
            if masks:
                mask_ = mask
                mask_ = resize(mask_, IMAGE_SIZE[::-1])
            seg_ = seg
            seg_ = resize(seg_, IMAGE_SIZE[::-1])
            gt_ = (seg_ > 0.5).astype(int)

            gt_flat = []
            pred_flat = []
            for p in range(pred_.shape[0]):
                for q in range(pred_.shape[1]):
                    if not masks or mask_[p, q] > 0.5:  # Inside the mask pixels only
                        gt_flat.append(gt_[p, q])
                        pred_flat.append(pred_[p, q])
            gt_list_out[f"out{out_id + 1}"] += gt_flat
            pred_list_out[f"out{out_id + 1}"] += pred_flat

            # Rescale to [0, 255] and save the probability map as an image
            pred_ = 255. * (pred_ - np.min(pred_)) / (np.max(pred_) - np.min(pred_))
            cv2.imwrite(f"./output/{DATASET}/crop_size_{crop_size}/out{out_id + 1}/{i + 1:02}.png", pred_)
            out_id += 1

    # Evaluate only the final iteration's output
    for out_id in range(iteration + 1)[-1:]:
        print('\n\n', f"out{out_id + 1}")
        evaluate(gt_list_out[f"out{out_id + 1}"], pred_list_out[f"out{out_id + 1}"], DATASET)
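# A minimal command-line driver sketch for the iterative model, assuming the
# train() and predict() defined above live in a single runnable script and that
# the dataset has already been prepared where prepare_dataset expects it. The
# argparse flags are illustrative and not part of the original code; predict()
# expects the checkpoint written by train() under trained_model/<DATASET>/.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Train or evaluate the iterative vessel segmentation model")
    parser.add_argument("--mode", choices=["train", "predict"], default="train")
    parser.add_argument("--dataset", default="DRIVE")
    parser.add_argument("--iteration", type=int, default=3)
    parser.add_argument("--crop_size", type=int, default=128)
    parser.add_argument("--epochs", type=int, default=200)
    args = parser.parse_args()

    if args.mode == "train":
        train(iteration=args.iteration, DATASET=args.dataset, crop_size=args.crop_size, epochs=args.epochs)
    else:
        predict(iteration=args.iteration, DATASET=args.dataset, crop_size=args.crop_size, epochs=args.epochs)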