def predict_average(self):
    """Average predictions over all folds and the last checkpoints, then show/save overlays.

    Averages self.predict over i_fold 0..5, k in {18, 19} and epoch in
    {99, 100}, binarizes the mean map, and writes the overlay PNG.
    NOTE(review): always returns 1 (callers appear to ignore it — kept as-is).
    """
    # One prediction per (fold, k, epoch) combination; averaged below.
    predictions = [
        self.predict(i_fold=fold, k=k_val, epoch=ep)
        for fold in range(6)
        for k_val in range(18, 20)
        for ep in range(99, 101)
    ]
    mean_pred = np.mean(predictions, axis=0)
    mean_pred_bin = self.get_bin(mean_pred)

    rgb_clean = self.img_x[..., :3]
    overlay = semi_transparant(rgb_clean, mean_pred_bin)
    concurrent([mean_pred[..., 1], mean_pred_bin, overlay, rgb_clean])

    save_bin = False      # original guarded this with `if 0:`
    save_overlay = True   # original guarded this with `if 1:`
    if save_bin:
        imsave(
            f'/scratch/lameeus/data/ghent_altar/output/hierarchy/10_lamb/'
            f'paintloss_tiunet_enc{self.fixed_enc}.png',
            mean_pred_bin)
    if save_overlay:
        imsave(
            f'/scratch/lameeus/data/ghent_altar/output/hierarchy/10_lamb/'
            f'paintloss_overlay_tiunet_enc{self.fixed_enc}.png',
            overlay)
    return 1
def folds_annot():
    """Export per-fold crops of the clean image and its annotation overlay.

    For each of the 6 k-fold annotation images, overlays the annotation mask
    on the clean RGB image, crops both to the fold's border box, and saves
    the two crops as PNGs.
    """
    training_set = get_10lamb_old(5)
    x_img, _, _, _ = get_training_data(training_set)
    rgb_clean = x_img[..., :3]

    border_getters = [get_borders1, get_borders2, get_borders3,
                      get_borders4, get_borders5, get_borders6]
    # Loop-invariant output folder (original re-assigned it every iteration).
    out_folder = '/scratch/lameeus/data/ghent_altar/input/hierarchy/10lamb/ifolds'

    for fold, get_borders in enumerate(border_getters):
        annot_img = imread(
            f'/home/lameeus/data/ghent_altar/input/hierachy/10_lamb/annotations/kfold/annot_{fold + 1}.png')
        mask = annotations2y(annot_img, thresh=.8)[..., 1]
        overlay = semi_transparant(rgb_clean, mask.astype(bool))

        w0, w1, h0, h1 = get_borders()
        overlay_crop = overlay[h0:h1, w0:w1, :]
        clean_crop = rgb_clean[h0:h1, w0:w1, :]

        debug_show = False  # original guarded this with `if 0:`
        if debug_show:
            concurrent([clean_crop, overlay_crop])

        imsave(os.path.join(out_folder, f'clean_crop_ifold{fold}.png'),
               clean_crop)
        imsave(os.path.join(out_folder, f'clean_annot_crop_ifold{fold}.png'),
               overlay_crop)
def continues_learning():
    """Compare predictions before/after continued training on panel 13 and save overlays.

    Loads two prediction maps, binarizes each with a threshold that equalizes
    the predicted class distribution against the ground truth, shows all
    intermediate images, and writes the two overlay PNGs.
    """
    in_folder = '/home/lameeus/data/ghent_altar/input/hierarchy/13_small'
    im_clean = imread(os.path.join(in_folder, 'clean.png'))[..., :3]
    y_true = annotations2y(imread(os.path.join(in_folder, 'annot.tif')))
    y_true_extra = annotations2y(
        imread(os.path.join(in_folder, 'clean_annot_practical.png')),
        thresh=.9)

    y_pred0 = imread(os.path.join(
        '/home/lameeus/data/ghent_altar/output/hierarchy/13_small/practical_annotations',
        'pred_transfer_kfoldenc2_ifold0_avg.png'))
    y_pred1 = imread(os.path.join(
        '/home/lameeus/data/ghent_altar/output/hierarchy/13_small',
        'pred_transfer_kfoldenc2_ifold0_avg_epoch50_J0427.png'))

    from performance.testing import optimal_test_thresh_equal_distribution
    from scripts.scripts_performance.main_performance import foo_performance
    from figures_paper.overlay import semi_transparant

    def binarise(pred_map):
        # Threshold chosen so the predicted class distribution matches y_true's;
        # also prints the performance metrics as a side effect.
        assert pred_map.ndim == 2
        stacked = np.stack([1 - pred_map, pred_map], axis=-1)
        thresh = optimal_test_thresh_equal_distribution(y_true, stacked)
        print(foo_performance(y_true, stacked, thresh))
        return pred_map >= thresh

    bin0 = binarise(y_pred0)
    bin1 = binarise(y_pred1)
    fancy0 = semi_transparant(im_clean, bin0)
    fancy1 = semi_transparant(im_clean, bin1)

    concurrent([im_clean, y_true[..., 0], y_true_extra[..., 0],
                y_pred0, y_pred1, bin0, bin1, fancy0, fancy1])

    out_folder = '/home/lameeus/data/ghent_altar/output/hierarchy/13_small/fancy'
    # NOTE(review): 'overaly' filenames kept byte-for-byte (existing outputs depend on them).
    imsave(os.path.join(out_folder, 'overalytrain10.png'), fancy0)
    imsave(os.path.join(out_folder, 'overalytrain10train13.png'), fancy1)
    return
def predict_compare_regular(self):
    """Per-fold averaged prediction of the regular TI-UNet, with cropped outputs.

    For each fold: averages predictions over k in {17, 18, 19} and epochs
    36..40, binarizes the mean, evaluates against the fold's test split, and
    saves the cropped binary map plus a semi-transparent overlay, with the
    Jaccard score embedded in the filename.
    """
    # Ground truth over the full image; used to pick the threshold.
    img_y_all = self.k_fold_train_data.get_train_data_all().get_y_train()
    # One border-box getter per fold (crop coordinates).
    lst_get = [
        get_borders1, get_borders2, get_borders3, get_borders4, get_borders5,
        get_borders6
    ]
    img_clean = self.img_x[..., :3]
    for i_fold in range(6):
        y_lst = []
        # Average prediction
        print(f'i_fold = {i_fold}')
        for k in [17, 18, 19]:
            print(f'k = {k}')
            for epoch in [36, 37, 38, 39, 40]:
                print(f'epoch = {epoch}')
                y = self.predict_regular(i_fold=i_fold, k=k, epoch=epoch)
                y_lst.append(y)
        y_avg = np.mean(y_lst, axis=0)
        y_avg_bin = self.get_bin(y_avg)
        # Performance
        # NOTE: threshold is chosen on ALL annotations, but performance is
        # reported on this fold's held-out test split.
        img_y_te = self.k_fold_train_data.k_split_i(i_fold).get_y_test()
        thresh = optimal_test_thresh_equal_distribution(img_y_all, y_avg)
        perf = foo_performance(img_y_te, y_avg, thresh)
        # CROP
        w0, w1, h0, h1 = lst_get[i_fold]()
        y_avg_bin_crop = y_avg_bin[h0:h1, w0:w1]
        clean_crop = img_clean[h0:h1, w0:w1, :]
        y_avg_bin_transparent_crop = semi_transparant(
            clean_crop, y_avg_bin_crop)
        if 0:  # debug preview, disabled
            concurrent(
                [clean_crop, y_avg_bin_crop, y_avg_bin_transparent_crop])
        folder_save = '/scratch/lameeus/data/ghent_altar/output/hierarchy/10_lamb/ifolds_regular_tiunet'
        filename = f'_tiunet_ifold{i_fold}_jacc{perf["jaccard"]:.3f}.png'
        # Save y_bin
        imsave(os.path.join(folder_save, 'binpred' + filename),
               y_avg_bin_crop,
               b_check_duplicate=False)
        # Save overlay
        imsave(os.path.join(folder_save, 'overlay' + filename),
               y_avg_bin_transparent_crop,
               b_check_duplicate=False)
def combine_stitches():
    """Merge two inpainting stitch variants into one image and save a detection overlay.

    Copies the bottom-left region (rows >= 768, cols < 768; presumably
    3 tiles of 256 px — TODO confirm tile size) from the manual variant into
    the base stitch, then builds and saves a paint-loss overlay.
    """
    inpaint_dir = '/home/lameeus/data/ghent_altar/output/hierarchy/10_lamb/inpainting'
    stitched = imread(os.path.join(
        inpaint_dir, 'inpainting_stitch_f2_g3_fixed_v0_linear_c256.png'))
    manual = imread(os.path.join(
        inpaint_dir, 'inpainting_stitch_f2_g3_fixed_v1_linear_c256_manual.png'))

    edge = 3 * 256
    stitched[edge:, :edge, :] = manual[edge:, :edge, :]

    if False:  # debug preview, disabled in the original (`if 0:`)
        plt.imshow(stitched)
    imsave(os.path.join(inpaint_dir, 'inpainting_comb.png'), stitched)

    from figures_paper.overlay import semi_transparant
    clean = imread('/home/lameeus/data/ghent_altar/input/hierarchy/10_lamb/clean.png')
    paintloss = imread('/home/lameeus/data/ghent_altar/output/hierarchy/10_lamb/detection_updated.png')
    overlay = semi_transparant(clean, paintloss)
    plt.imshow(overlay)  # original guarded this with an always-true `if 1:`

    fancy_dir = '/home/lameeus/data/ghent_altar/output/hierarchy/10_lamb/fancy'
    imsave(os.path.join(fancy_dir, 'det_overlay.png'), overlay)
def foo(n_segm, b=0):
    """Predict with `n_segm`, print performance, display results, optionally save.

    Args:
        n_segm: segmentation network wrapper with .predict() (and .epoch when saving).
        b: truthy to also save the prediction map to disk.

    NOTE(review): reads `self` from the enclosing scope — this is a nested helper,
    not a standalone function.
    """
    pred = n_segm.predict(self.img_x)
    thresh = optimal_test_thresh_equal_distribution(self.img_y_te, pred)
    print(foo_performance(self.img_y_te, pred, thresh))

    rgb = self.img_x[..., :3]
    pred_bin = pred[..., 1] >= thresh
    concurrent([rgb, pred[..., 1], pred_bin, semi_transparant(rgb, pred_bin)])

    if b:
        from data.datatools import imsave
        info_epoch = f'_epoch{n_segm.epoch}' if n_segm.epoch > 0 else ''
        filename = ('/home/lameeus/data/ghent_altar/output/hierarchy/'
                    + f'13_small/pred_transfer_kfoldenc{self.fixed_enc}_ifold{self.i_fold}_avg{info_epoch}.png')
        imsave(filename, pred[..., 1])
def predict_compare(self):
    """Compare the three encoder regimes (train / fixed / fixed-then-train) per fold.

    For each fold and each fixed_enc setting: averages predictions over
    k in {17, 18, 19} and epochs 96..100, binarizes the mean, evaluates on
    the fold's test split, crops to the fold's border box, and saves the
    binary map plus a semi-transparent overlay with the Jaccard score in
    the filename.

    Side effect: mutates self.fixed_enc (it is the loop variable).
    """
    # Ground truth over the full image; used to pick the threshold.
    img_y_all = self.k_fold_train_data.get_train_data_all().get_y_train()
    # One border-box getter per fold (crop coordinates).
    lst_get = [
        get_borders1, get_borders2, get_borders3, get_borders4, get_borders5,
        get_borders6
    ]
    img_clean = self.img_x[..., :3]
    for i_fold in range(6):
        for i_fixed_enc in range(3):
            # self.fixed_enc steers which checkpoints self.predict loads.
            self.fixed_enc = i_fixed_enc
            y_lst = []
            # Average prediction
            for k in [17, 18, 19]:
                for epoch in [96, 97, 98, 99, 100]:
                    y = self.predict(i_fold=i_fold, k=k, epoch=epoch)
                    y_lst.append(y)
            y_avg = np.mean(y_lst, axis=0)
            y_avg_bin = self.get_bin(y_avg)
            # Performance
            # NOTE: threshold is chosen on ALL annotations, performance is
            # reported on this fold's held-out test split.
            img_y_te = self.k_fold_train_data.k_split_i(
                i_fold).get_y_test()
            thresh = optimal_test_thresh_equal_distribution(
                img_y_all, y_avg)
            perf = foo_performance(img_y_te, y_avg, thresh)
            # CROP
            w0, w1, h0, h1 = lst_get[i_fold]()
            y_avg_bin_crop = y_avg_bin[h0:h1, w0:w1]
            clean_crop = img_clean[h0:h1, w0:w1, :]
            y_avg_bin_transparent_crop = semi_transparant(
                clean_crop, y_avg_bin_crop)
            if 0:  # debug preview, disabled
                concurrent([
                    clean_crop, y_avg_bin_crop, y_avg_bin_transparent_crop
                ])
            # Save
            # Map encoder regime to a human-readable tag for the filename.
            if self.fixed_enc == 0:
                info_enc = 'Train'
            elif self.fixed_enc == 1:
                info_enc = 'Fixed'
            elif self.fixed_enc == 2:
                info_enc = 'FixedTrain'
            folder_save = '/scratch/lameeus/data/ghent_altar/output/hierarchy/10_lamb/ifolds'
            filename = f'_enc{info_enc}_ifold{i_fold}_jacc{perf["jaccard"]:.3f}.png'
            # Save y_bin
            imsave(os.path.join(folder_save, 'binpred' + filename),
                   y_avg_bin_crop,
                   b_check_duplicate=False)
            # Save overlay
            imsave(os.path.join(folder_save, 'overlay' + filename),
                   y_avg_bin_transparent_crop,
                   b_check_duplicate=False)
def main():
    """Evaluate pretrained U-Net checkpoints per fold and log metrics to CSV.

    For each fold, walks epochs from newest to oldest, evaluating both the
    single-epoch prediction and the running average of all predictions seen
    so far; results are appended to two dataframes on disk.
    """
    # Toggle to evaluate the encoder-fixed variant (changes paths/filenames).
    b_encoder_fixed = False
    info_enc_fixed = '_enc_fixed'
    folder_weights = '/scratch/lameeus/data/ghent_altar/net_weight/10lamb_kfold_pretrained'
    folder_save = '/home/lameeus/data/ghent_altar/dataframes'
    filename_single = f'pretrained_unet_10lamb_kfold_single'
    filename_avg_pred = f'pretrained_unet_10lamb_kfold_avgpred'
    folder_weights += info_enc_fixed if b_encoder_fixed else ''
    filename_single += info_enc_fixed if b_encoder_fixed else ''
    filename_avg_pred += info_enc_fixed if b_encoder_fixed else ''
    fold_range = range(6)
    # fold_range = [0, 1]
    k = 10
    epoch_range = range(1, 40 + 1)
    w_ext_in = 28  # input window extension for NeuralNet
    k_fold_train_data = get_10lamb_6patches(5)  # 5 is the number of modalities
    train_data_all = k_fold_train_data.get_train_data_all()
    img_x = train_data_all.get_x_train()
    img_x = rescale0to1(img_x)
    img_clean = img_x[..., :3]
    img_y_all = train_data_all.get_y_train()
    b_plot = False
    for i_fold in fold_range:
        print(i_fold)
        img_y_te = k_fold_train_data.k_split_i(i_fold).get_y_test()
        # Init for range epochs
        lst_data_single = []
        lst_data_avg_pred = []
        list_y_pred = []
        model = None
        # Newest epoch first; the running average therefore covers
        # [epoch .. max(epoch_range)] at each step.
        for epoch in np.sort(epoch_range)[::-1]:
            filepath_model = os.path.join(
                folder_weights, f'unet_enc_k{k}_ifold{i_fold}/w_{epoch}.h5')
            # Reuse the previous model object: only weights are reloaded.
            model = load_model_quick(filepath_model, model=model)
            n = NeuralNet(model, w_ext=w_ext_in)
            y_pred = n.predict(img_x)
            """ Average out predictions """
            list_y_pred.append(y_pred)
            y_avg_pred = np.mean(list_y_pred, axis=0)
            # Thresholds equalize predicted class distribution vs. all annotations.
            thresh_single = optimal_test_thresh_equal_distribution(
                img_y_all, y_pred)
            thresh_avg_pred = optimal_test_thresh_equal_distribution(
                img_y_all, y_avg_pred)
            y_pred_bin = np.greater_equal(y_pred[..., 1], thresh_single)
            dict_perf = foo_performance(img_y_te, y_pred, thresh_single)
            print(dict_perf)
            if b_plot:
                concurrent([
                    y_pred_bin, img_clean,
                    semi_transparant(img_clean, y_pred_bin),
                    semi_transparant(img_clean, img_y_te[..., 1].astype(bool))
                ])
            data_single_i = {'k': k, 'i_fold': i_fold, 'epoch': epoch}
            data_avg_pred_i = {
                'k': k,
                'i_fold': i_fold,
                'epoch_start': epoch,
                'epoch_end': max(epoch_range)
            }
            data_single_i.update(dict_perf)
            data_avg_pred_i.update(
                foo_performance(img_y_te, y_avg_pred, thresh_avg_pred))
            lst_data_single.append(data_single_i)
            lst_data_avg_pred.append(data_avg_pred_i)
        # Persist this fold's results; append=True accumulates across folds/runs.
        df_single = pd.DataFrame(lst_data_single)
        df_avg_pred = pd.DataFrame(lst_data_avg_pred)
        path_single = os.path.join(folder_save, filename_single + '.csv')
        path_avg_pred = os.path.join(folder_save, filename_avg_pred + '.csv')
        pandas_save(path_single, df_single, append=True)
        pandas_save(path_avg_pred, df_avg_pred, append=True)
    return
def transfer_learning(
        epoch=25,  # Could check a few
        b_plot=False):
    """Evaluate transfer-learning variants of the TI-UNet on the lamb panel.

    Loads one checkpoint per entry of train_data_lst, predicts on the lamb
    image, prints kappa and jaccard evaluations, and returns a dict mapping
    training-set name to its kappa evaluation result.

    Returns:
        dict: train_data name -> evaluation dict from _eval_func_single.
    """
    d = 2  # 1, 2
    img_x, img_y_val = data_lamb()
    k = 10
    model_name = 'ti-unet'
    # Window extension depends on network depth d.
    w_ext = 10 if d == 1 else 26
    # train_data:
    y_pred_lst = []
    n = ['clean']  # panel titles for the concurrent() plot below
    # train_data_lst = ['1319_10', '10', '1319', '1319_101319']
    train_data_lst = ['10nat', '1319_10nat', '1319_10nat1319', '1319']
    data_i_lst = {}
    for train_data in train_data_lst:
        print(train_data)
        # Models fine-tuned from '1319_*' start counting after 50 pretrain
        # epochs; the pure '1319' baseline is pinned to its epoch 50.
        epoch_start = 50
        epoch_corr = epoch + epoch_start if train_data[:5] == '1319_' else epoch
        if train_data == '1319':
            epoch_corr = 50
        path = f'C:/Users/admin/Data/ghent_altar/net_weight/{train_data}/{model_name}_d{d}_k{k}/w_{epoch_corr}.h5'
        try:
            model = load_model_quick(path)
        except Exception as e:
            # Best-effort: skip variants whose checkpoint is missing/unloadable.
            print(e)
            continue
        neural_net = NeuralNet(model, w_ext=w_ext, norm_x=True)
        y_pred = neural_net.predict(img_x)
        # baseline
        data_i = _eval_func_single(img_y_val, y_pred, metric='kappa')
        print(data_i)
        if 0:  # one-off analysis, disabled
            """ Checking which baseline ~ .22
            i = 0: .268, Remove huge improvement ( a lot of "green" background annotated as paint loss)
            i = 1: .228 Keep!
            i = 2: .179 keep! Drop (keep!!
            i = 3: .159 keep! Even more important
            i = 4: .252 Remove (huge problem right top)
            i = 5: .233 Keep, quit relevant
            """
            from datasets.default_trainingsets import get_10lamb_6patches
            kFoldTrainData = get_10lamb_6patches(5)
            _eval_func_single(
                kFoldTrainData.k_split_i(0).get_y_train(), y_pred,
                metric='kappa')
            # Check what is influence without!
        # Only the kappa result is kept in the returned dict.
        data_i_lst[train_data] = data_i
        data_i = _eval_func_single(img_y_val, y_pred, metric='jaccard')
        print(data_i)
        y_pred_lst.append(y_pred)
        n.append(train_data)
    # plt.imshow(neural_net.predict(img_x[::2,::2,:])[..., 1])
    if b_plot:
        concurrent([img_x[..., :3]] + [a[..., 1] for a in y_pred_lst], n)
    if 0:  # paper-figure export, disabled
        from figures_paper.overlay import semi_transparant
        from data.datatools import imread, imsave
        t = [data_i_lst[n_i]['thresh'] for n_i in train_data_lst]
        p = []
        for i, train_data in enumerate(train_data_lst):
            b = np.greater_equal(y_pred_lst[i][..., 1], t[i])
            # NOTE(review): `k` is re-bound here from the int above to an image.
            k = semi_transparant(img_x[..., :3], b, 0)
            p.append(k)
            imsave(
                os.path.join(
                    "C:/Users/admin/OneDrive - ugentbe/data/images_paper",
                    train_data + '.png'), k)
        concurrent(p)
    return data_i_lst
print(data_i) # Show before saving all from figures_paper.overlay import semi_transparant l = [im_clean] for i, b in enumerate([b_annot, b_sh, b_pred, b_pred_unet]): if i == 0: # Probably not really interesting to change the colour l.append( semi_transparant(im_clean, b.astype(bool), color1='cyan', color2='grey', transparency=0, transparancy2=.5)) else: l.append( semi_transparant(im_clean, b.astype(bool), color2='grey', transparency=0, transparancy2=.5)) concurrent(l) folder_out = 'C:/Users/admin/OneDrive - ugentbe/data/images_paper' imsave(os.path.join(folder_out, f'{data}_clean.png'), im_clean) imsave(os.path.join(folder_out, f'{data}_annot.png'), l[1])