def test_sub_blocks():
    start = [8]
    quant = [[64, 8, 8]]
    color = ['hsv']
    for s in start:
        for q in quant:
            print('--- # -- ')
            db_desc = SubBlockDescriptor(db)
            q1_desc = SubBlockDescriptor(qs1_w1)
            q2_desc = SubBlockDescriptor(qs2_w1, masks=True, mask_path=mask_root)
            db_desc.compute_descriptors(grid_blocks=[s, s], quantify=q, color_space=color[0])
            q1_desc.compute_descriptors(grid_blocks=[s, s], quantify=q, color_space=color[0])
            q2_desc.compute_descriptors(grid_blocks=[s, s], quantify=q, color_space=color[0])
            # -- SEARCH -- #
            q1_search = Searcher(db_desc.result, q1_desc.result)
            q2_search = Searcher(db_desc.result, q2_desc.result)
            q1_desc.clear_memory()
            q2_desc.clear_memory()
            db_desc.clear_memory()
            q1_search.search(limit=3)
            q2_search.search(limit=3)
            # -- EVALUATION -- #
            q1_eval = EvaluateDescriptors(q1_search.result, res_root + os.sep + 'gt_corresps1.pkl')
            q2_eval = EvaluateDescriptors(q2_search.result, res_root + os.sep + 'gt_corresps2.pkl')
            q1_search.clear_memory()
            q2_search.clear_memory()
            q1_eval.compute_mapatk(limit=1)
            q2_eval.compute_mapatk(limit=1)
            filename = res_root + os.sep + 'tests' + os.sep + 'sub_res_' + str(s) + '_' + str(q[0]) + '.pkl'
            with open(filename, 'wb') as f:
                pickle.dump(q1_eval.score, f)
                pickle.dump(q2_eval.score, f)
            print('--- # -- ')
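# NOTE: test_sub_blocks() and test_level_desc() pickle the two query-set scores
# back-to-back into a single file, so reading a result file back requires two
# sequential pickle.load() calls. The helper below is an illustrative sketch;
# the name `load_test_scores` is not part of the original code base.
def load_test_scores(filename):
    """Return the (qs1_score, qs2_score) pair written by the test_* functions."""
    with open(filename, 'rb') as f:
        qs1_score = pickle.load(f)
        qs2_score = pickle.load(f)
    return qs1_score, qs2_score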
def test_level_desc():
    level = [2]
    start = [5, 6]
    jump = [2]
    quant = [[16, 8, 8], [24, 12, 12]]
    color = ['hsv']
    for l in level:
        for s in start:
            for j in jump:
                for q in quant:
                    print('--- # -- ')
                    db_desc = LevelDescriptor(db)
                    q1_desc = LevelDescriptor(qs1_w1)
                    q2_desc = LevelDescriptor(qs2_w1, masks=True, mask_path=mask_root)
                    db_desc.compute_descriptors(levels=l, init_quant=q, start=s, jump=j, color_space=color[0])
                    q1_desc.compute_descriptors(levels=l, init_quant=q, start=s, jump=j, color_space=color[0])
                    q2_desc.compute_descriptors(levels=l, init_quant=q, start=s, jump=j, color_space=color[0])
                    # -- SEARCH -- #
                    q1_search = Searcher(db_desc.result, q1_desc.result)
                    q2_search = Searcher(db_desc.result, q2_desc.result)
                    db_desc.clear_memory()
                    q1_desc.clear_memory()
                    q2_desc.clear_memory()
                    q1_search.search(limit=3)
                    q2_search.search(limit=3)
                    # -- EVALUATION -- #
                    q1_eval = EvaluateDescriptors(q1_search.result, res_root + os.sep + 'gt_corresps1.pkl')
                    q2_eval = EvaluateDescriptors(q2_search.result, res_root + os.sep + 'gt_corresps2.pkl')
                    q1_search.clear_memory()
                    q2_search.clear_memory()
                    q1_eval.compute_mapatk(limit=1)
                    q2_eval.compute_mapatk(limit=1)
                    filename = (res_root + os.sep + 'tests' + os.sep + 'lev_res_' +
                                str(l) + '_' + str(s) + '_' + str(j) + '_' + str(q[0]) + '.pkl')
                    with open(filename, 'wb') as f:
                        pickle.dump(q1_eval.score, f)
                        pickle.dump(q2_eval.score, f)
                    print('--- # -- ')
def evaluate():
    # -- OPEN FILES -- #
    print('Opening files')
    with open(res_root + os.sep + 'qs1_bbox.pkl', 'rb') as f:
        qs1_bbox = pickle.load(f)
    with open(res_root + os.sep + 'qs2_bbox.pkl', 'rb') as f:
        qs2_bbox = pickle.load(f)
    with open(res_root + os.sep + 'qs1_result.pkl', 'rb') as f:
        qs1_result = pickle.load(f)
    with open(res_root + os.sep + 'qs2_result.pkl', 'rb') as f:
        qs2_result = pickle.load(f)
    print('Done')
    # -- EVALUATE BBOX QS1 -- #
    # qs1_mask / qs2_mask are not loaded here; they are expected to be available
    # in the enclosing (module) scope.
    print('Evaluating QS1:')
    qs1_box_eval = EvaluateIoU(qs1_bbox, qs1_w2 + os.sep + 'text_boxes.pkl')
    qs1_mask_eval = EvaluateMasks(qs1_mask, qs1_w2)
    qs1_desc_eval = EvaluateDescriptors(qs1_result, qs1_w2 + os.sep + 'gt_corresps.pkl')
    qs1_box_eval.compute_iou()
    qs1_mask_eval.compute_fscore()
    qs1_desc_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs1_desc_eval.score) + ']')
    qs1_desc_eval.compute_mapatk(limit=3)
    print('DESC MAP3: [' + str(qs1_desc_eval.score) + ']')
    print('BBOX IOU: [' + str(qs1_box_eval.score) + ']')
    print('MASK FSCORE: [' + str(qs1_mask_eval.score) + ']')
    print('Done')
    # -- EVALUATE BBOX QS2 -- #
    print('Evaluating QS2:')
    qs2_box_eval = EvaluateIoU(qs2_bbox, qs2_w2 + os.sep + 'text_boxes.pkl')
    qs2_mask_eval = EvaluateMasks(qs2_mask, qs2_w2)
    qs2_desc_eval = EvaluateDescriptors(qs2_result, qs2_w2 + os.sep + 'gt_corresps.pkl')
    qs2_box_eval.compute_iou()
    qs2_mask_eval.compute_fscore()
    qs2_desc_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs2_desc_eval.score) + ']')
    qs2_desc_eval.compute_mapatk(limit=3)
    print('DESC MAP3: [' + str(qs2_desc_eval.score) + ']')
    print('BBOX IOU: [' + str(qs2_box_eval.score) + ']')
    print('MASK FSCORE: [' + str(qs2_mask_eval.score) + ']')
    print('Done')
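# The evaluation functions above report mean average precision at k (MAP@k) via
# EvaluateDescriptors.compute_mapatk(). The sketch below is NOT the project's
# implementation; it is a minimal reference for the metric, assuming predictions
# are ranked lists of db indices and ground truth lists the relevant indices.
def mapatk_sketch(predictions, ground_truth, k=1):
    """Mean average precision at k over paired (ranked prediction, relevant ids) lists."""
    ap_sum, n = 0.0, 0
    for pred, gt in zip(predictions, ground_truth):
        hits, precision_sum = 0, 0.0
        for rank, idx in enumerate(pred[:k], start=1):
            if idx in gt:
                hits += 1
                precision_sum += hits / rank
        ap_sum += precision_sum / min(len(gt), k) if gt else 0.0
        n += 1
    return ap_sum / n if n else 0.0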
def main(eval_=True):
    global_start = time.time()
    print('-- READING IMAGES --')
    start = time.time()
    db_paths = sorted(glob(db_path + os.sep + '*.jpg'))
    qs_paths = sorted(glob(qs1_w5 + os.sep + '*.jpg'))
    db_images = [[cv2.imread(path)] for path in db_paths]
    qs_images = [cv2.imread(path) for path in qs_paths]
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- DENOISING IMAGES --')
    start = time.time()
    if not os.path.isfile(res_root + os.sep + 'denoised.pkl'):
        denoiser = Denoiser(qs_images)
        qs_denoised = denoiser.denoise()
        with open(res_root + os.sep + 'denoised.pkl', 'wb') as ff:
            pickle.dump(qs_denoised, ff)
    else:
        with open(res_root + os.sep + 'denoised.pkl', 'rb') as ff:
            qs_denoised = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- DETECTING ORIENTATION --')
    start = time.time()
    if not (os.path.isfile(res_root + os.sep + 'angles.pkl')
            and os.path.isfile(res_root + os.sep + 'rotated.pkl')
            and os.path.isfile(res_root + os.sep + 'angles_real.pkl')):
        orientation = Orientation(qs_denoised)
        qs_angles, qs_angles_real, qs_rotated = orientation.compute_orientation()
        with open(res_root + os.sep + 'angles.pkl', 'wb') as ff:
            pickle.dump(qs_angles, ff)
        with open(res_root + os.sep + 'angles_real.pkl', 'wb') as ff:
            pickle.dump(qs_angles_real, ff)
        with open(res_root + os.sep + 'rotated.pkl', 'wb') as ff:
            pickle.dump(qs_rotated, ff)
    else:
        with open(res_root + os.sep + 'angles.pkl', 'rb') as ff:
            qs_angles = pickle.load(ff)
        with open(res_root + os.sep + 'angles_real.pkl', 'rb') as ff:
            qs_angles_real = pickle.load(ff)
        with open(res_root + os.sep + 'rotated.pkl', 'rb') as ff:
            qs_rotated = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    if eval_:
        print('-- EVALUATING ANGLES --')
        start = time.time()
        angle_evaluator = EvaluateAngles(qs_angles, qs1_w5 + os.sep + 'angles_qsd1w5.pkl')
        score = angle_evaluator.evaluate(degree_margin=1.5)
        print('-- DONE: Time: ' + str(time.time() - start))

    print('-- SPLITTING IMAGES --')
    start = time.time()
    if not (os.path.isfile(res_root + os.sep + 'splitted.pkl')
            and os.path.isfile(res_root + os.sep + 'qs_displays.pkl')):
        spliter = SplitImages(qs_rotated)
        qs_splitted, qs_displays = spliter.get_paintings()
        with open(res_root + os.sep + 'splitted.pkl', 'wb') as ff:
            pickle.dump(qs_splitted, ff)
        with open(res_root + os.sep + 'qs_displays.pkl', 'wb') as ff:
            pickle.dump(qs_displays, ff)
    else:
        with open(res_root + os.sep + 'splitted.pkl', 'rb') as ff:
            qs_splitted = pickle.load(ff)
        with open(res_root + os.sep + 'qs_displays.pkl', 'rb') as ff:
            qs_displays = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- COMPUTE FOREGROUND --')
    start = time.time()
    if not (os.path.isfile(res_root + os.sep + 'qs_masks_rot.pkl')
            and os.path.isfile(res_root + os.sep + 'qs_bboxs_rot.pkl')):
        removal = BackgroundRemoval(qs_splitted)
        qs_masks_rot, qs_bboxs_rot = removal.remove_background()
        with open(res_root + os.sep + 'qs_masks_rot.pkl', 'wb') as ff:
            pickle.dump(qs_masks_rot, ff)
        with open(res_root + os.sep + 'qs_bboxs_rot.pkl', 'wb') as ff:
            pickle.dump(qs_bboxs_rot, ff)
    else:
        with open(res_root + os.sep + 'qs_masks_rot.pkl', 'rb') as ff:
            qs_masks_rot = pickle.load(ff)
        with open(res_root + os.sep + 'qs_bboxs_rot.pkl', 'rb') as ff:
            qs_bboxs_rot = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- UNROTATE MASKS AND FOREGROUND BOUNDING BOXES --')
    start = time.time()
    if not (os.path.isfile(res_root + os.sep + 'qs_masks.pkl')
            and os.path.isfile(res_root + os.sep + 'qs_bboxs.pkl')):
        undo_rotation = Unrotate(qs_images)
        qs_masks, qs_bboxs = undo_rotation.unrotate(qs_angles, qs_bboxs_rot, qs_masks_rot, qs_displays)
        with open(res_root + os.sep + 'qs_masks.pkl', 'wb') as ff:
            pickle.dump(qs_masks, ff)
        with open(res_root + os.sep + 'qs_bboxs.pkl', 'wb') as ff:
            pickle.dump(qs_bboxs, ff)
    else:
        with open(res_root + os.sep + 'qs_masks.pkl', 'rb') as ff:
            qs_masks = pickle.load(ff)
        with open(res_root + os.sep + 'qs_bboxs.pkl', 'rb') as ff:
            qs_bboxs = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- COMPUTE FRAMES OUTPUT PICKLE --')
    start = time.time()
    if not os.path.isfile(res_root + os.sep + 'frames.pkl'):
        qs_frames = []
        for ind, bboxs in enumerate(qs_bboxs):
            qs_frames.append([])
            for ind2, bbox in enumerate(bboxs):
                qs_frames[-1].append([qs_angles[ind], bbox])
        with open(res_root + os.sep + 'frames.pkl', 'wb') as ff:
            pickle.dump(qs_frames, ff)
    else:
        with open(res_root + os.sep + 'frames.pkl', 'rb') as ff:
            qs_frames = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- COMPUTE TEXTBOXES --')
    start = time.time()
    if not os.path.isfile(res_root + os.sep + 'text_masks.pkl'):
        text_removal = TextDetection(qs_splitted)
        text_masks = text_removal.detect()
        with open(res_root + os.sep + 'text_masks.pkl', 'wb') as ff:
            pickle.dump(text_masks, ff)
    else:
        with open(res_root + os.sep + 'text_masks.pkl', 'rb') as ff:
            text_masks = pickle.load(ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- COMPUTE DESCRIPTORS --')
    start = time.time()
    # db_desc = SIFTDescriptor(db_images, None, None)
    # qs_desc = SIFTDescriptor(qs_splitted, mask_list=qs_masks_rot, bbox_list=text_masks)
    db_desc = ORBDescriptor(db_images, None, None)
    qs_desc = ORBDescriptor(qs_splitted, mask_list=qs_masks_rot, bbox_list=text_masks)
    db_desc.compute_descriptors()
    qs_desc.compute_descriptors()
    print('-- DONE: Time: ' + str(time.time() - start))

    print('-- COMPUTE MATCHES --')
    start = time.time()
    matcher = MatcherFLANN(db_desc.result, qs_desc.result, flag=True)
    matcher.match(min_matches=12, match_ratio=0.65)
    with open('../results/result.pkl', 'wb') as ff:
        pickle.dump(matcher.result, ff)
    print('-- DONE: Time: ' + str(time.time() - start))

    if eval_:
        print('-- EVALUATING DESCRIPTORS --')
        start = time.time()
        desc_evaluator = EvaluateDescriptors(matcher.result, qs1_w5 + os.sep + 'gt_corresps.pkl')
        desc_evaluator.compute_mapatk(limit=1)
        print('MAP@1: [{0}]'.format(desc_evaluator.score))
        desc_evaluator.compute_mapatk(limit=5)
        print('MAP@5: [{0}]'.format(desc_evaluator.score))

    print('-- Total time: ' + str(time.time() - global_start))
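# MatcherFLANN is configured above with min_matches=12 and match_ratio=0.65.
# The sketch below is NOT the project's MatcherFLANN class; it only illustrates
# the underlying idea for a single query/candidate pair: FLANN k-NN matching of
# ORB (binary) descriptors with Lowe's ratio test, keeping a candidate only when
# enough matches survive.
def flann_orb_match_sketch(query_desc, db_desc, match_ratio=0.65, min_matches=12):
    """Return the number of ratio-test survivors between two ORB descriptor sets."""
    index_params = dict(algorithm=6,  # FLANN_INDEX_LSH, suited to binary descriptors
                        table_number=6, key_size=12, multi_probe_level=1)
    flann = cv2.FlannBasedMatcher(index_params, dict(checks=50))
    good = 0
    for pair in flann.knnMatch(query_desc, db_desc, k=2):
        # LSH can return fewer than 2 neighbours; only full pairs pass the ratio test.
        if len(pair) == 2 and pair[0].distance < match_ratio * pair[1].distance:
            good += 1
    return good if good >= min_matches else 0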
def test_qs1():
    print('GET DB')
    db_text = get_text()
    db_images = [[cv2.imread(item)] for item in sorted(glob(os.path.join(db, "*.jpg")))]
    print('done')
    # -- READ -- #
    print('READ FILES')
    with open(res_root + os.sep + 'denoised.pkl', 'rb') as ff:
        qs1_images = pickle.load(ff)
    with open(res_root + os.sep + 'qs1_bbox.pkl', 'rb') as ff:
        qs1_bbox = pickle.load(ff)
    with open(res_root + os.sep + 'qs1_mask.pkl', 'rb') as ff:
        qs1_mask = pickle.load(ff)
    print('done')
    # -- TRANSFORM -- #
    print('computing transform (LBP) descriptors')
    db_desc_trans = TransformDescriptor(db_images, None, None)
    db_desc_trans.compute_descriptors(transform_type='lbp')
    qs_desc_trans = TransformDescriptor(qs1_images, qs1_mask, None)
    qs_desc_trans.compute_descriptors(transform_type='lbp')
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_trans.result, qs_desc_trans.result)
    qs_searcher.search(limit=5)
    print("Done.")
    qs_eval = EvaluateDescriptors(qs_searcher.result, os.path.join(qs1_w3, 'gt_corresps.pkl'))
    qs_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs_eval.score) + ']')
    qs_eval.compute_mapatk(limit=5)
    print('DESC MAP5: [' + str(qs_eval.score) + ']')
    print('done')
    # -- TEXT -- #
    print('computing text descriptors')
    qs_desc = TextDescriptor(qs1_images, qs1_bbox)
    qs_desc.compute_descriptors()
    # -- SEARCH -- #
    qs_searcher = SearcherText(db_text, qs_desc.result)
    qs_searcher.search(limit=5)
    print("Done.")
    qs_eval = EvaluateDescriptors(qs_searcher.result, os.path.join(qs1_w3, 'gt_corresps.pkl'))
    qs_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs_eval.score) + ']')
    qs_eval.compute_mapatk(limit=5)
    print('DESC MAP5: [' + str(qs_eval.score) + ']')
    print('done')
    # -- COLOR -- #
    print('computing color descriptors')
    db_desc_col = SubBlockDescriptor(db_images, None)
    db_desc_col.compute_descriptors()
    qs_desc_col = SubBlockDescriptor(qs1_images, qs1_mask)
    qs_desc_col.compute_descriptors()
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_col.result, qs_desc_col.result)
    qs_searcher.search(limit=5)
    print("Done.")
    qs_eval = EvaluateDescriptors(qs_searcher.result, os.path.join(qs1_w3, 'gt_corresps.pkl'))
    qs_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs_eval.score) + ']')
    qs_eval.compute_mapatk(limit=5)
    print('DESC MAP5: [' + str(qs_eval.score) + ']')
    print('done')
    # -- COMBINED SEARCH -- #
    qs_searcher = SearcherCombined(db_desc_col.result, qs_desc_col.result,
                                   db_desc_trans.result, qs_desc_trans.result,
                                   db_text, qs_desc.result, use_text=True)
    db_desc_col.clear_memory()
    qs_desc_col.clear_memory()
    db_desc_trans.clear_memory()
    qs_desc_trans.clear_memory()
    qs_searcher.search(limit=5)
    print("Done.")
    qs_eval = EvaluateDescriptors(qs_searcher.result, os.path.join(qs1_w3, 'gt_corresps.pkl'))
    qs_eval.compute_mapatk(limit=1)
    print('DESC MAP1: [' + str(qs_eval.score) + ']')
    qs_eval.compute_mapatk(limit=5)
    print('DESC MAP5: [' + str(qs_eval.score) + ']')
    print('done')
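# TransformDescriptor is used above with transform_type='lbp'. As an illustration
# of that family of descriptors (not the project's implementation), a basic LBP
# histogram can be built with scikit-image as follows; the function name
# `lbp_histogram_sketch` is hypothetical.
def lbp_histogram_sketch(image_bgr, points=8, radius=1):
    """Uniform LBP histogram of a BGR image, L1-normalised."""
    from skimage.feature import local_binary_pattern
    gray = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2GRAY)
    lbp = local_binary_pattern(gray, points, radius, method='uniform')
    # Uniform LBP codes range over 0..points+1, hence points+2 histogram bins.
    hist, _ = np.histogram(lbp.ravel(), bins=points + 2, range=(0, points + 2))
    return hist / max(hist.sum(), 1)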
def main_qs1w3(evaluate=False):
    print("QSD1_W3")
    print("Reading Images...")
    db_text = get_text()
    denoiser = Denoise(qs1_w3)
    db_images = [[cv2.imread(item)] for item in sorted(glob(os.path.join(db, "*.jpg")))]
    print("Denoising Images...")
    denoiser.median_filter(3)
    qs_images = denoiser.tv_bregman(weight=0.01, max_iter=1000, eps=0.001, isotropic=True)
    # cv2.imwrite(r"C:\Users\PC\Documents\Roger\Master\M1\Project\Week3\tests_folder\testdenoise.png", qs_images[0][0])
    # qs_images = [[cv2.imread(item)] for item in sorted(glob(os.path.join(qs1_w3, "*.jpg")))]  # No denoising
    print("Done.")

    print("Obtaining textbox masks for each painting...")
    query_mask = []
    query_bbox = []
    for ind, img in enumerate(qs_images):
        print(ind, "of", len(qs_images))
        for paint in img:
            mask, textbox = TextBoxRemoval(paint)
            bbox = [textbox[0][1], textbox[0][0], textbox[1][1], textbox[1][0]]
            query_mask.append([mask])
            query_bbox.append([bbox])
            cv2.imwrite(os.path.join(res_root, 'QS1W3', '{0:05d}.png'.format(ind)), mask)
            # cv2.imwrite(os.path.join(tests_path, '{0:05d}_mask.png'.format(ind)), mask)
    print("Done.")
    # input("Stop execution...")

    if evaluate:
        eval_iou = EvaluateIoU(query_bbox, os.path.join(qs1_w3, "text_boxes.pkl"))
        eval_iou.compute_iou()
        print("Bbox masks IoU:", eval_iou.score)

    # -- SAVE BBOXES -- #
    print("Writing final bboxs...")
    with open(os.path.join(res_root, "qs1_bbox.pkl"), 'wb') as file:
        pickle.dump(query_bbox, file)
    print("Done.")

    # -- DESCRIPTORS -- #
    # -- COLOR -- #
    print('computing color descriptors')
    db_desc_col = SubBlockDescriptor(db_images, None)
    db_desc_col.compute_descriptors()
    qs_desc_col = SubBlockDescriptor(qs_images, query_mask)
    qs_desc_col.compute_descriptors()
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_col.result, qs_desc_col.result)
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs1_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for color descriptors ", map_at_1)
        print("MAP@5 for color descriptors ", map_at_5)
    print("Done.")
    print("Writing color desc...")
    with open(os.path.join(res_root, "qs1_color_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- TRANSFORM -- #
    print('Obtaining transform descriptors.')
    db_desc_trans = TransformDescriptor(db_images, None, None)
    db_desc_trans.compute_descriptors(transform_type='hog')
    qs_desc_trans = TransformDescriptor(qs_images, query_mask, None)
    qs_desc_trans.compute_descriptors(transform_type='hog')
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_trans.result, qs_desc_trans.result)
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs1_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for transform descriptors ", map_at_1)
        print("MAP@5 for transform descriptors ", map_at_5)
    print("Done.")
    print("Writing transform desc...")
    with open(os.path.join(res_root, "qs1_transform_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- TEXT -- #
    print('computing text descriptors')
    qs_desc_text = TextDescriptor(qs_images, query_bbox)
    qs_desc_text.compute_descriptors()
    save_text(qs_desc_text.result, 'qs1')
    # -- SEARCH -- #
    qs_searcher = SearcherText(db_text, qs_desc_text.result)
    qs_desc_text.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs1_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for text descriptors with levenshtein ", map_at_1)
        print("MAP@5 for text descriptors with levenshtein ", map_at_5)
    print("Done.")
    print("Writing text desc...")
    with open(os.path.join(res_root, "qs1_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- COMBINED -- #
    print('computing combined descriptors without text')
    # -- SEARCH -- #
    qs_searcher = SearcherCombined(db_desc_col.result, qs_desc_col.result,
                                   db_desc_trans.result, qs_desc_trans.result,
                                   db_text, qs_desc_text.result, False)
    db_desc_col.clear_memory()
    qs_desc_col.clear_memory()
    db_desc_trans.clear_memory()
    qs_desc_trans.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs1_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for combined descriptors without text ", map_at_1)
        print("MAP@5 for combined descriptors without text ", map_at_5)
    print("Done.")
    print("Writing combined desc...")
    with open(os.path.join(res_root, "qs1_combined_without_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- COMBINED -- #
    print('computing combined descriptors with text')
    # -- SEARCH -- #
    qs_searcher = SearcherCombined(db_desc_col.result, qs_desc_col.result,
                                   db_desc_trans.result, qs_desc_trans.result,
                                   db_text, qs_desc_text.result, True)
    db_desc_col.clear_memory()
    qs_desc_col.clear_memory()
    db_desc_trans.clear_memory()
    qs_desc_trans.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs1_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for combined descriptors with text ", map_at_1)
        print("MAP@5 for combined descriptors with text ", map_at_5)
    print("Done.")
    print("Writing combined desc...")
    with open(os.path.join(res_root, "qs1_combined_with_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")
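# SearcherText ranks paintings by string similarity between the recognised text
# and the database titles (the "text descriptors with levenshtein" results
# above). The function below is a self-contained reference implementation of
# Levenshtein distance, not necessarily the one used inside SearcherText.
def levenshtein_sketch(a, b):
    """Minimum number of single-character edits turning string a into string b."""
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, start=1):
        curr = [i]
        for j, cb in enumerate(b, start=1):
            curr.append(min(prev[j] + 1,                # deletion
                            curr[j - 1] + 1,            # insertion
                            prev[j - 1] + (ca != cb)))  # substitution
        prev = curr
    return prev[-1]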
def main_qs2w3(evaluate=False):
    # -- GET IMAGES -- #
    print("Denoising Images...")
    folder_path = qs2_w3
    db_text = get_text()
    denoiser = Denoise(folder_path)
    denoiser.median_filter(3)
    qs_images = denoiser.tv_bregman(weight=0.01, max_iter=1000, eps=0.001, isotropic=True)
    print("Done.")

    print("Obtaining list of paintings...")
    img2paintings = getListOfPaintings(qs_images, "EDGES")
    db_images = []
    for db_path in sorted(glob(os.path.join(db, "*.jpg"))):
        db_images.append([cv2.imread(db_path)])
    print("Done.")

    print("Obtaining background masks for each painting...")
    img2paintings_mask = []
    for ind, img in enumerate(img2paintings):
        print(ind, "of", len(img2paintings))
        img2paintings_mask.append([])
        for painting in img:
            mask, mean_points = BackgroundMask4(painting)
            img2paintings_mask[-1].append({"painting": painting, "mask": mask, "mean_points": mean_points})
        # UNCOMMENT LINE BELOW TO PRODUCE THE MASK TO UPLOAD TO THE SERVER
        cv2.imwrite(os.path.join(res_root, "QS2W3", "{0:05d}.png".format(ind)),
                    np.concatenate([item["mask"] for item in img2paintings_mask[-1]], axis=1))
    print("Done.")

    print("Obtaining textbox masks for each painting...")
    img2paintings_items = []
    img2paintings_bboxs = []
    for ind, img in enumerate(img2paintings_mask):
        print(ind, "of", len(img2paintings_mask))
        img2paintings_items.append([])
        for painting_items in img:
            painting_masked = painting_items["painting"][
                painting_items["mean_points"]["top"]:painting_items["mean_points"]["bottom"],
                painting_items["mean_points"]["left"]:painting_items["mean_points"]["right"], :]
            mask, textbox = TextBoxRemoval(painting_masked)
            bbox_mask = np.zeros(shape=(painting_items["painting"].shape[0], painting_items["painting"].shape[1]))
            bbox_mask[painting_items["mean_points"]["top"]:painting_items["mean_points"]["bottom"],
                      painting_items["mean_points"]["left"]:painting_items["mean_points"]["right"]] = mask
            bbox = [textbox[0][1], textbox[0][0], textbox[1][1], textbox[1][0]]
            # Shift the textbox coordinates from the cropped painting back to the full image.
            bbox[1] = bbox[1] + painting_items["mean_points"]["top"]
            bbox[3] = bbox[3] + painting_items["mean_points"]["top"]
            bbox[0] = bbox[0] + painting_items["mean_points"]["left"]
            bbox[2] = bbox[2] + painting_items["mean_points"]["left"]
            bbox_detected = False if np.mean(mask) == 255 else True
            img2paintings_items[-1].append({"fg_mask": painting_items["mask"],
                                            "mean_points": painting_items["mean_points"],
                                            "bbox_mask": bbox_mask,
                                            "bbox": bbox,
                                            "bbox_detected": bbox_detected})
    print("Done.")

    print("Combining masks in one picture + adapting bboxes...")
    final_masks = []
    img2paintings_final_mask = []
    img2paintings_fg_bboxs = []
    final_bboxs = []
    for ind, img in enumerate(img2paintings_items):
        print(ind, "of", len(img2paintings_items))
        to_concatenate = []
        fg_bboxs = []
        bboxs = []
        for ind2, painting_items in enumerate(img):
            total_mask = painting_items["fg_mask"]
            if painting_items["bbox_detected"]:
                total_mask[painting_items["bbox"][1]:painting_items["bbox"][3],
                           painting_items["bbox"][0]:painting_items["bbox"][2]] = 0
            to_concatenate.append(total_mask)
            if ind2 == 0:
                bboxs.append(painting_items["bbox"])
            else:
                # Offset the bbox horizontally by the widths of the paintings already concatenated.
                missing_size = 0
                for item in to_concatenate[:-1]:
                    missing_size += item.shape[1]
                bbox = painting_items["bbox"]
                bbox[0] += missing_size
                bbox[2] += missing_size
                bboxs.append(bbox)
            fg_bboxs.append(painting_items["mean_points"])
        img2paintings_fg_bboxs.append(fg_bboxs)
        img2paintings_final_mask.append(to_concatenate)
        final_mask = np.concatenate(to_concatenate, axis=1)
        final_masks.append(final_mask)
        final_bboxs.append(bboxs)
    print("Done.")

    if evaluate:
        eval_iou = EvaluateIoU(final_bboxs, os.path.join(qs2_w3, "text_boxes.pkl"))
        eval_iou.compute_iou()
        print("Bbox masks IoU:", eval_iou.score)

    print("Writing final bboxs...")
    with open(os.path.join(res_root, "qs2_bbox.pkl"), "wb") as f:
        pickle.dump(final_bboxs, f)
    print("Done.")

    print("Writing final masks...")
    for ind, final_mask in enumerate(final_masks):
        cv2.imwrite(os.path.join(res_root, "QS2W3", "{0:05d}.png".format(ind)), final_mask)
    print("Done.")

    print("Obtaining descriptors.")
    # -- DESCRIPTORS -- #
    # -- COLOR -- #
    print('computing color descriptors')
    db_desc_col = SubBlockDescriptor(db_images, None)
    db_desc_col.compute_descriptors()
    qs_desc_col = SubBlockDescriptor(img2paintings, img2paintings_final_mask)
    qs_desc_col.compute_descriptors()
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_col.result, qs_desc_col.result)
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs2_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for color descriptors ", map_at_1)
        print("MAP@5 for color descriptors ", map_at_5)
    print("Done.")
    print("Writing color desc...")
    with open(os.path.join(res_root, "qs2_color_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- TRANSFORM -- #
    print('Obtaining transform descriptors.')
    db_desc_trans = TransformDescriptor(db_images, None, None)
    db_desc_trans.compute_descriptors(transform_type='hog')
    qs_desc_trans = TransformDescriptor(img2paintings, img2paintings_final_mask, img2paintings_fg_bboxs)
    qs_desc_trans.compute_descriptors(transform_type='hog')
    # -- SEARCH -- #
    qs_searcher = Searcher(db_desc_trans.result, qs_desc_trans.result)
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs2_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for transform descriptors ", map_at_1)
        print("MAP@5 for transform descriptors ", map_at_5)
    print("Done.")
    print("Writing transform desc...")
    with open(os.path.join(res_root, "qs2_transform_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- TEXT -- #
    print('computing text descriptors')
    qs_desc_text = TextDescriptor(img2paintings, img2paintings_fg_bboxs)
    qs_desc_text.compute_descriptors()
    save_text(qs_desc_text.result, 'qs2')
    # -- SEARCH -- #
    qs_searcher = SearcherText(db_text, qs_desc_text.result)
    qs_desc_text.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs2_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for text descriptors with levenshtein ", map_at_1)
        print("MAP@5 for text descriptors with levenshtein ", map_at_5)
    print("Done.")
    print("Writing text desc...")
    with open(os.path.join(res_root, "qs2_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- COMBINED -- #
    print('computing combined descriptors without text')
    # -- SEARCH -- #
    qs_searcher = SearcherCombined(db_desc_col.result, qs_desc_col.result,
                                   db_desc_trans.result, qs_desc_trans.result,
                                   db_text, qs_desc_text.result, False)
    db_desc_col.clear_memory()
    qs_desc_col.clear_memory()
    db_desc_trans.clear_memory()
    qs_desc_trans.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs2_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for combined descriptors without text ", map_at_1)
        print("MAP@5 for combined descriptors without text ", map_at_5)
    print("Done.")
    print("Writing combined desc...")
    with open(os.path.join(res_root, "qs2_combined_without_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")

    # -- COMBINED -- #
    print('computing combined descriptors with text')
    # -- SEARCH -- #
    qs_searcher = SearcherCombined(db_desc_col.result, qs_desc_col.result,
                                   db_desc_trans.result, qs_desc_trans.result,
                                   db_text, qs_desc_text.result, True)
    db_desc_col.clear_memory()
    qs_desc_col.clear_memory()
    db_desc_trans.clear_memory()
    qs_desc_trans.clear_memory()
    qs_searcher.search(limit=10)
    if evaluate:
        evaluator = EvaluateDescriptors(qs_searcher.result, qs2_corresps_path)
        map_at_1 = evaluator.compute_mapatk(1)
        map_at_5 = evaluator.compute_mapatk(5)
        print("MAP@1 for combined descriptors with text ", map_at_1)
        print("MAP@5 for combined descriptors with text ", map_at_5)
    print("Done.")
    print("Writing combined desc...")
    with open(os.path.join(res_root, "qs2_combined_with_text_result.pkl"), 'wb') as file:
        pickle.dump(qs_searcher.result, file)
    print("Done.")