def measure(self, generated, vessels, masks, num_data, iter_time, phase, total_time):
    # masking
    vessels_in_mask, generated_in_mask = utils.pixel_values_in_mask(vessels, generated, masks)

    # average processing time per image (ms)
    avg_pt = (total_time / num_data) * 1000

    # evaluate Area Under the Curve of ROC and Precision-Recall
    auc_roc = utils.AUC_ROC(vessels_in_mask, generated_in_mask)
    auc_pr = utils.AUC_PR(vessels_in_mask, generated_in_mask)

    # binarize to calculate Dice Coefficient
    binarys_in_mask = utils.threshold_by_otsu(generated, masks)
    dice_coeff = utils.dice_coefficient_in_train(vessels_in_mask, binarys_in_mask)
    acc, sensitivity, specificity = utils.misc_measures(vessels_in_mask, binarys_in_mask)
    score = auc_pr + auc_roc + dice_coeff + acc + sensitivity + specificity

    # auc_sum selects the best model during training
    # (an earlier variant used auc_sum = auc_roc + auc_pr regardless of stage)
    auc_sum = dice_coeff + acc + auc_pr

    # print information
    ord_output = collections.OrderedDict([('auc_pr', auc_pr), ('auc_roc', auc_roc),
                                          ('dice_coeff', dice_coeff), ('acc', acc),
                                          ('sensitivity', sensitivity), ('specificity', specificity),
                                          ('score', score), ('auc_sum', auc_sum),
                                          ('best_auc_sum', self.best_auc_sum), ('avg_pt', avg_pt)])
    utils.print_metrics(iter_time, ord_output)

    # write to tensorboard in train mode only
    if phase == 'train':
        self.model.measure_assign(auc_pr, auc_roc, dice_coeff, acc, sensitivity,
                                  specificity, score, iter_time)
    elif phase == 'test':
        # save in npy format for offline evaluation
        utils.save_obj(vessels_in_mask, generated_in_mask,
                       os.path.join(self.auc_out_dir, "auc_roc.npy"),
                       os.path.join(self.auc_out_dir, "auc_pr.npy"))

    return auc_sum
def measure(self, generated, vessels, masks, num_data, iter_time, phase, total_time):
    # masking
    vessels_in_mask, generated_in_mask = utils.pixel_values_in_mask(vessels, generated, masks)

    # average processing time per image (ms)
    avg_pt = (total_time / num_data) * 1000

    # evaluation
    auc_roc = utils.AUC_ROC(vessels_in_mask, generated_in_mask)
    auc_pr = utils.AUC_PR(vessels_in_mask, generated_in_mask)
    binarys_in_mask = utils.threshold_by_otsu(generated, masks)
    dice_coeff = utils.dice_coefficient_in_train(vessels_in_mask, binarys_in_mask)
    acc, sensitivity, specificity = utils.misc_measures(vessels_in_mask, binarys_in_mask)
    score = auc_pr + auc_roc + dice_coeff + acc + sensitivity + specificity

    # print information
    ord_output = collections.OrderedDict([('auc_pr', auc_pr), ('auc_roc', auc_roc),
                                          ('dice_coeff', dice_coeff), ('acc', acc),
                                          ('sensitivity', sensitivity), ('specificity', specificity),
                                          ('score', score), ('best_dice_coeff', self.best_dice_coeff),
                                          ('avg_pt', avg_pt)])
    utils.print_metrics(iter_time, ord_output)

    # write to tensorboard in train mode
    if phase == 'train':
        self.model.measure_assign(auc_pr, auc_roc, dice_coeff, acc, sensitivity,
                                  specificity, score, iter_time)

    if phase == 'test':
        # save in npy format for offline evaluation
        utils.save_obj(vessels_in_mask, generated_in_mask,
                       os.path.join(self.auc_out_dir, "auc_roc.npy"),
                       os.path.join(self.auc_out_dir, "auc_pr.npy"))

    return dice_coeff
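# The utils.threshold_by_otsu helper used above is defined elsewhere in the project.
# A minimal sketch, assuming `generated` holds probability maps in [0, 1] and `masks`
# marks the field of view (FOV), is given below for illustration only; the helper name
# with the "_sketch" suffix and the exact return convention are assumptions.
import numpy as np
from skimage.filters import threshold_otsu


def threshold_by_otsu_sketch(probability_maps, fov_masks):
    """Binarize probability maps with an Otsu threshold computed inside the FOV."""
    fov_values = probability_maps[fov_masks > 0]            # pixels inside the field of view
    otsu_threshold = threshold_otsu(fov_values)             # global Otsu cut-off on those pixels
    return (fov_values > otsu_threshold).astype(np.uint8)   # flattened binary vessel map in the FOV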
if check_validation_batch:
    utils.check_input(imgs, segs, train_img_check_dir)
    check_validation_batch = False
pred = network.predict(imgs, batch_size=batch_size, verbose=0)
loss = network.evaluate(imgs, segs, batch_size=batch_size, verbose=0)
losses += [loss] * imgs.shape[0]
pred_masks.append(pred)
gt_masks.append(segs)
fundus_imgs.append(imgs)

# aggregate predictions gathered over the validation batches
pred_masks = np.concatenate(pred_masks, axis=0)
gt_masks = np.concatenate(gt_masks, axis=0)
fundus_imgs = np.concatenate(fundus_imgs, axis=0)

# evaluate results
auroc = utils.AUC_ROC(gt_masks, pred_masks)
aupr = utils.AUC_PR(gt_masks, pred_masks)
utils.print_metrics(epoch + 1, auroc=auroc, aupr=aupr, validation_losses=np.mean(losses))

# save the weights when AUPR improves
if aupr > best_aupr:
    network.save_weights(os.path.join(model_out_dir, "network_{}.h5".format(epoch + 1)))
    best_aupr = aupr

# save validation results
for index in range(pred_masks.shape[0]):
    Image.fromarray((pred_masks[index, ..., 0] * 255).astype(np.uint8)).save(
        os.path.join(img_out_dir, str(epoch + 1) + "_{:02}_segmented.png".format(index + 1)))
    Image.fromarray((gt_masks[index, ..., 0] * 255).astype(np.uint8)).save(
        os.path.join(img_out_dir, str(epoch + 1) + "_{:02}_gt.png".format(index + 1)))
    Image.fromarray((fundus_imgs[index, ...] * 255).astype(np.uint8)).save(
        os.path.join(img_out_dir, str(epoch + 1) + "_{:02}_fundus_patch.png".format(index + 1)))

sys.stdout.flush()
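# utils.AUC_ROC and utils.AUC_PR are used for the validation metrics above but defined
# elsewhere. A minimal sketch built on scikit-learn, assuming binary ground-truth masks
# and probability maps of the same shape, could look like this; it is illustrative only
# and the "_sketch" names are not the project's own.
from sklearn.metrics import average_precision_score, roc_auc_score


def auc_roc_sketch(gt_masks, pred_masks):
    """Area under the ROC curve computed over all pixels."""
    return roc_auc_score(gt_masks.flatten(), pred_masks.flatten())


def auc_pr_sketch(gt_masks, pred_masks):
    """Area under the precision-recall curve (average precision) over all pixels."""
    return average_precision_score(gt_masks.flatten(), pred_masks.flatten())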
# evaluate the GAN on the validation set
gan_x_test, gan_y_test = utils.input2gan(val_imgs, val_vessels, d_out_shape)
loss, acc = gan.evaluate(gan_x_test, gan_y_test, batch_size=batch_size, verbose=0)
utils.print_metrics(n_round + 1, acc=acc, loss=loss, type='GAN')

# save the generator weights
g.save_weights(os.path.join(model_out_dir,
                            "g_{}_{}_{}.h5".format(n_round, FLAGS.discriminator, FLAGS.ratio_gan2seg)))

# update step sizes and learning rates
scheduler.update_steps(n_round)
K.set_value(d.optimizer.lr, scheduler.get_lr())
K.set_value(gan.optimizer.lr, scheduler.get_lr())

# evaluate on test images
if n_round in rounds_for_evaluation:
    generated = g.predict(test_imgs, batch_size=batch_size)
    generated = np.squeeze(generated, axis=3)
    vessels_in_mask, generated_in_mask = utils.pixel_values_in_mask(test_vessels, generated, test_masks)
    auc_roc = utils.AUC_ROC(vessels_in_mask, generated_in_mask,
                            os.path.join(auc_out_dir, "auc_roc_{}.npy".format(n_round)))
    auc_pr = utils.AUC_PR(vessels_in_mask, generated_in_mask,
                          os.path.join(auc_out_dir, "auc_pr_{}.npy".format(n_round)))
    binarys_in_mask = utils.threshold_by_otsu(generated, test_masks)
    dice_coeff = utils.dice_coefficient_in_train(vessels_in_mask, binarys_in_mask)
    acc, sensitivity, specificity = utils.misc_measures(vessels_in_mask, binarys_in_mask)
    utils.print_metrics(n_round + 1, auc_pr=auc_pr, auc_roc=auc_roc, dice_coeff=dice_coeff, acc=acc,
                        sensitivity=sensitivity, specificity=specificity, type='TESTING')

    # save segmented test images
    segmented_vessel = utils.remain_in_mask(generated, test_masks)
    for index in range(segmented_vessel.shape[0]):
        Image.fromarray((segmented_vessel[index, :, :] * 255).astype(np.uint8)).save(
            os.path.join(img_out_dir, str(n_round) + "_{:02}_segmented.png".format(index + 1)))
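# utils.pixel_values_in_mask is called throughout but not defined here. A minimal
# sketch, assuming binary FOV masks and arrays of shape (N, H, W), is shown below;
# the "_sketch" name and exact return convention are assumptions, not the project's
# confirmed implementation.
import numpy as np


def pixel_values_in_mask_sketch(true_vessels, pred_vessels, fov_masks):
    """Return ground-truth labels and predicted probabilities restricted to the FOV."""
    inside_fov = fov_masks > 0                  # boolean selector for FOV pixels
    true_in_mask = true_vessels[inside_fov]     # 1-D array of labels inside the FOV
    pred_in_mask = pred_vessels[inside_fov]     # 1-D array of probabilities inside the FOV
    return true_in_mask, pred_in_mask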
for index_pred in range(len(pred_filenames)):
    # build array of ground-truth masks matched to predictions by filename
    if index_gt < len(gt_filenames) and os.path.basename(
            pred_filenames[index_pred]).replace(".jpg", "") in os.path.basename(gt_filenames[index_gt]):
        gt = utils.imagefiles2arrs(gt_filenames[index_gt:index_gt + 1]).astype(np.uint8)[0, ...]
        gt_all[index_pred, ...] = gt
        index_gt += 1

# compute PR/ROC metrics on all images and on the training and validation subsets
aupr_all, best_f1_all, best_f1_thresh_all, sen_all, ppv_all = utils.pr_metric(gt_all, pred_all)
auroc_all = utils.AUC_ROC(gt_all, pred_all)
aupr_training, best_f1_training, best_f1_thresh_training, sen_training, ppv_training = utils.pr_metric(
    gt_all[training_indices], pred_all[training_indices])
auroc_training = utils.AUC_ROC(gt_all[training_indices], pred_all[training_indices])
aupr_val, best_f1_val, best_f1_thresh_val, sen_val, ppv_val = utils.pr_metric(
    gt_all[val_indices], pred_all[val_indices])
auroc_val = utils.AUC_ROC(gt_all[val_indices], pred_all[val_indices])

# print results and store to lists
sens = [sen_all, sen_training, sen_val]
ppvs = [ppv_all, ppv_training, ppv_val]
auprs = [aupr_all, aupr_training, aupr_val]
aurocs = [auroc_all, auroc_training, auroc_val]
best_f1s = [best_f1_all, best_f1_training, best_f1_val]
best_f1_threshs = [best_f1_thresh_all, best_f1_thresh_training, best_f1_thresh_val]
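# utils.pr_metric returns (AUPR, best F1, best-F1 threshold, sensitivity, PPV) at the
# call sites above. A minimal sketch built on scikit-learn, assuming flattened binary
# ground truth and probability predictions, is given below; the "_sketch" name and the
# tie-breaking details are assumptions rather than the project's confirmed code.
import numpy as np
from sklearn.metrics import average_precision_score, precision_recall_curve


def pr_metric_sketch(gt, pred):
    gt, pred = gt.flatten(), pred.flatten()
    aupr = average_precision_score(gt, pred)
    precision, recall, thresholds = precision_recall_curve(gt, pred)
    # F1 at every threshold; the last (precision, recall) pair has no threshold attached
    f1_scores = 2 * precision[:-1] * recall[:-1] / (precision[:-1] + recall[:-1] + 1e-12)
    best_index = int(np.argmax(f1_scores))
    best_f1 = float(f1_scores[best_index])
    best_f1_thresh = float(thresholds[best_index])
    sensitivity = float(recall[best_index])    # recall == sensitivity at the best-F1 threshold
    ppv = float(precision[best_index])         # precision == positive predictive value
    return aupr, best_f1, best_f1_thresh, sensitivity, ppv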
check_train_batch = False
total_loss, loss_all, loss_vessel = network.train_on_batch(
    [imgs, vessels], [segs, segs[:, ::32, ::32, :]])
losses_all += [loss_all] * len(filenames)
losses_vessel += [loss_vessel] * len(filenames)

# report running losses over the training batches
print("loss_all: {}, loss_vessel: {}".format(np.mean(losses_all), np.mean(losses_vessel)))

# evaluate on validation set
if check_validation_batch:
    utils.check_input(val_imgs, val_masks, val_img_check_dir)
    # utils.check_input(val_vessels, val_masks, val_img_check_dir)
    check_validation_batch = False
val_generated_masks_f_v, val_generated_masks_v = network.predict(
    [val_imgs, val_vessels], batch_size=batch_size, verbose=0)
auroc = utils.AUC_ROC(val_masks, val_generated_masks_f_v)
aupr = utils.AUC_PR(val_masks, val_generated_masks_f_v)
val_generated_masks_f_v = np.round(val_generated_masks_f_v)
cm, spe, sen, dice_val, jaccard_val = utils.seg_metrics(val_masks, val_generated_masks_f_v)
utils.print_metrics(epoch + 1, jaccard_val=jaccard_val, dice_val=dice_val, sen=sen, auroc=auroc, aupr=aupr)

# save the weights
network.save_weights(os.path.join(model_out_dir, "network_{}.h5".format(epoch)))
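# utils.seg_metrics returns (confusion matrix, specificity, sensitivity, Dice, Jaccard)
# above. A minimal sketch, assuming flattened binary masks, is given below; the argument
# order follows the call site, while the "_sketch" name and internals are assumptions.
import numpy as np
from sklearn.metrics import confusion_matrix


def seg_metrics_sketch(gt_masks, pred_masks):
    gt = gt_masks.flatten().astype(np.uint8)
    pred = pred_masks.flatten().astype(np.uint8)
    cm = confusion_matrix(gt, pred, labels=[0, 1])
    tn, fp, fn, tp = cm.ravel()
    specificity = tn / float(tn + fp + 1e-12)
    sensitivity = tp / float(tp + fn + 1e-12)
    dice = 2 * tp / float(2 * tp + fp + fn + 1e-12)
    jaccard = tp / float(tp + fp + fn + 1e-12)
    return cm, specificity, sensitivity, dice, jaccard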